Revamp questionnaire, parallelize run-all, add new tasks
- Replace 6 compound Likert questions with 12 atomic ones grouped by dimension (syntax, expressiveness, data/IO, errors, overall); drop the free-form question. Responses are now stored as ints, not strings.
- Add a back-compat layer that maps legacy keys to the new dimensions so existing results still render.
- Parallelize run-all with a ThreadPoolExecutor (configurable worker count) and add a thread-safe min-request-interval rate limiter to the Anthropic provider.
- Add new tasks: path_normalizer, todo_manager, currency_converter, locale_weather_url, network_info_parser, url_normalizer.
This commit is contained in:
113
tasks/pipeline/currency_converter.toml
Normal file
113
tasks/pipeline/currency_converter.toml
Normal file
@@ -0,0 +1,113 @@
|
||||
name = "currency_converter"
|
||||
category = "pipeline"
|
||||
mode = "convert"
|
||||
description = """
|
||||
A currency converter that reads conversion requests from stdin.
|
||||
Each line has the format: AMOUNT FROM TO RATE
|
||||
- AMOUNT: a decimal number (e.g., 12.35)
|
||||
- FROM: 3-letter currency code
|
||||
- TO: 3-letter currency code
|
||||
- RATE: the exchange rate from FROM's base to TO's base
|
||||
|
||||
Some currencies are pegged to others at fixed rates:
|
||||
BAM is pegged to EUR at 1.95583
|
||||
BMD is pegged to USD at 1.0
|
||||
BND is pegged to SGD at 1.0
|
||||
DJF is pegged to USD at 177.721
|
||||
PAB is pegged to USD at 1.0
|
||||
|
||||
When a pegged currency is involved, the conversion must account for the
|
||||
peg coefficient. The formula is: result = amount * (rate / coef_from) * coef_to
|
||||
where coef is the peg ratio (1 if not pegged).
|
||||
|
||||
Output one line per input: "AMOUNT FROM = RESULT TO" with RESULT
|
||||
computed using bc with scale=2.
|
||||
For invalid lines (wrong field count, or a non-numeric amount or rate), output "ERROR: <original line>".
|
||||
"""
|
||||
|
||||
bash_source = '''
|
||||
#!/bin/bash
|
||||
|
||||
pegged_to() {
|
||||
case "$1" in
|
||||
BAM) echo "EUR:1.95583" ;;
|
||||
BMD) echo "USD:1.0" ;;
|
||||
BND) echo "SGD:1.0" ;;
|
||||
DJF) echo "USD:177.721" ;;
|
||||
PAB) echo "USD:1.0" ;;
|
||||
*) echo "NONE:1" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||
# Skip empty lines
|
||||
[[ -z "$line" ]] && continue
|
||||
|
||||
# Parse fields
|
||||
set -- $line
|
||||
if [[ $# -ne 4 ]]; then
|
||||
echo "ERROR: $line"
|
||||
continue
|
||||
fi
|
||||
|
||||
amount=$1
|
||||
from=$2
|
||||
to=$3
|
||||
rate=$4
|
||||
|
||||
# Validate amount is numeric
|
||||
if [[ ! "$amount" =~ ^[0-9]+(\.[0-9]+)?$ ]]; then
|
||||
echo "ERROR: $line"
|
||||
continue
|
||||
fi
|
||||
|
||||
# Validate rate is numeric
|
||||
if [[ ! "$rate" =~ ^[0-9]+(\.[0-9]+)?$ ]]; then
|
||||
echo "ERROR: $line"
|
||||
continue
|
||||
fi
|
||||
|
||||
# Get peg info
|
||||
peg_from=$(pegged_to "$from")
|
||||
coef_from=$(echo "$peg_from" | cut -d: -f2)
|
||||
|
||||
peg_to=$(pegged_to "$to")
|
||||
coef_to=$(echo "$peg_to" | cut -d: -f2)
|
||||
|
||||
# Calculate: result = amount * (rate / coef_from) * coef_to
|
||||
result=$(echo "scale=8; $amount * ($rate / $coef_from) * $coef_to" | bc)
|
||||
result=$(printf "%.2f" "$result")
|
||||
|
||||
echo "$amount $from = $result $to"
|
||||
done
|
||||
'''
|
||||
|
||||
[[test_cases]]
|
||||
description = "Standard conversion with direct rate"
|
||||
stdin = """100 USD EUR 0.92
|
||||
50 GBP JPY 188.50"""
|
||||
expected_stdout = """100 USD = 92.00 EUR
|
||||
50 GBP = 9425.00 JPY"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Pegged currency conversions"
|
||||
stdin = """100 BAM USD 1.08
|
||||
200 BMD EUR 0.92
|
||||
50 USD DJF 1.0"""
|
||||
expected_stdout = """100 BAM = 55.22 USD
|
||||
200 BMD = 184.00 EUR
|
||||
50 USD = 8886.05 DJF"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Invalid input lines"
|
||||
stdin = """abc EUR USD 0.92
|
||||
100 USD
|
||||
100 EUR USD 0.85"""
|
||||
expected_stdout = """ERROR: abc EUR USD 0.92
|
||||
ERROR: 100 USD
|
||||
100 EUR = 85.00 USD"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Pegged-to-pegged conversion"
|
||||
stdin = "100 BAM BMD 1.08"
|
||||
expected_stdout = "100 BAM = 55.22 BMD"
|
||||
76
tasks/pipeline/locale_weather_url.toml
Normal file
76
tasks/pipeline/locale_weather_url.toml
Normal file
@@ -0,0 +1,76 @@
|
||||
name = "locale_weather_url"
|
||||
category = "pipeline"
|
||||
mode = "convert"
|
||||
description = """
|
||||
Construct weather API URLs from locale and location information.
|
||||
Read lines from stdin in the format: LANG_CODE LOCATION
|
||||
where LANG_CODE is a 2-letter locale (e.g., "en", "fr", "de")
|
||||
and LOCATION is a city/place name (may contain spaces).
|
||||
|
||||
For each line, construct a URL in the format:
|
||||
https://LANG.wttr.in/LOCATION
|
||||
|
||||
Where spaces in the location are replaced with "+" characters.
|
||||
|
||||
If LANG_CODE is empty or invalid (not exactly 2 lowercase letters),
|
||||
default to "en".
|
||||
|
||||
Skip empty lines. Output one URL per input line.
|
||||
"""
|
||||
|
||||
bash_source = '''
|
||||
#!/bin/bash
# Build wttr.in weather URLs from "LANG_CODE LOCATION" lines on stdin.
# A code that is not exactly two lowercase letters falls back to "en";
# spaces inside the location become "+"; empty lines are skipped.
while IFS= read -r entry || [[ -n "$entry" ]]; do
  if [[ -z "$entry" ]]; then
    continue
  fi

  # First whitespace-separated field is the locale code.
  code=$(awk '{print $1}' <<< "$entry")
  # Everything after the first field (and the spaces that follow it).
  place=$(sed 's/^[^ ]* *//' <<< "$entry")

  # Default to "en" unless the code is exactly two lowercase letters.
  [[ "$code" == [a-z][a-z] ]] || code="en"

  # A single-word line can leave the "location" equal to the code;
  # treat that as no location at all.
  if [[ "$place" == "$code" ]]; then
    place=""
  fi

  # Spaces become "+" in the URL path.
  printf 'https://%s.wttr.in/%s\n' "$code" "${place// /+}"
done
|
||||
'''
|
||||
|
||||
[[test_cases]]
|
||||
description = "Various locales and locations"
|
||||
stdin = """en New York
|
||||
fr Paris
|
||||
de Berlin
|
||||
ja Tokyo"""
|
||||
expected_stdout = """https://en.wttr.in/New+York
|
||||
https://fr.wttr.in/Paris
|
||||
https://de.wttr.in/Berlin
|
||||
https://ja.wttr.in/Tokyo"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Multi-word locations"
|
||||
stdin = """en San Francisco
|
||||
es Buenos Aires
|
||||
pt Rio de Janeiro"""
|
||||
expected_stdout = """https://en.wttr.in/San+Francisco
|
||||
https://es.wttr.in/Buenos+Aires
|
||||
https://pt.wttr.in/Rio+de+Janeiro"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Invalid or missing locale defaults to en"
|
||||
stdin = """ENG London
|
||||
123 Moscow
|
||||
x Rome"""
|
||||
expected_stdout = """https://en.wttr.in/London
|
||||
https://en.wttr.in/Moscow
|
||||
https://en.wttr.in/Rome"""
|
||||
86
tasks/pipeline/network_info_parser.toml
Normal file
86
tasks/pipeline/network_info_parser.toml
Normal file
@@ -0,0 +1,86 @@
|
||||
name = "network_info_parser"
|
||||
category = "pipeline"
|
||||
mode = "convert"
|
||||
description = """
|
||||
Parse network interface configuration from stdin (in "ip addr show" format)
|
||||
and extract a summary of each interface.
|
||||
|
||||
For each interface block, output a line:
|
||||
IFACE: <name> IP: <ipv4_addr> MASK: /<prefix_len>
|
||||
|
||||
An interface block starts with a line like:
|
||||
2: eth0: <BROADCAST,MULTICAST,UP> mtu 1500 ...
|
||||
and contains inet lines like:
|
||||
inet 192.168.1.100/24 brd 192.168.1.255 scope global eth0
|
||||
|
||||
If an interface has no inet line, output:
|
||||
IFACE: <name> IP: none MASK: none
|
||||
|
||||
Skip the loopback interface (lo).
|
||||
"""
|
||||
|
||||
bash_source = '''
|
||||
#!/bin/bash
# Summarize "ip addr show"-style input from stdin: one line per interface,
#   IFACE: <name> IP: <ipv4_addr> MASK: /<prefix_len>
# or "IP: none MASK: none" when the interface has no IPv4 address.
# The loopback interface "lo" is skipped.

current_iface=""
found_ip=""
found_mask=""

# Regexes hoisted into variables; bash's [[ =~ ]] replaces the previous
# per-line grep forks (the original forked grep/awk/sed/cut for every line).
iface_re='^[0-9]+:'
inet_re='^[[:space:]]+inet [0-9]'

# Emit the summary line for the interface collected so far (if any).
flush_iface() {
  if [[ -n "$current_iface" && "$current_iface" != "lo" ]]; then
    if [[ -n "$found_ip" ]]; then
      echo "IFACE: $current_iface IP: $found_ip MASK: /$found_mask"
    else
      echo "IFACE: $current_iface IP: none MASK: none"
    fi
  fi
}

while IFS= read -r line || [[ -n "$line" ]]; do
  # Interface header, e.g. "2: eth0: <BROADCAST,MULTICAST,UP> mtu 1500"
  if [[ "$line" =~ $iface_re ]]; then
    flush_iface
    # Name is the first word between the first and second colon.
    rest=${line#*:}                         # drop the leading "N:"
    rest=${rest%%:*}                        # keep only up to the next colon
    rest=${rest#"${rest%%[![:space:]]*}"}   # trim leading whitespace
    current_iface=${rest%%[[:space:]]*}     # first token
    found_ip=""
    found_mask=""
  fi

  # IPv4 address line, e.g. "    inet 192.168.1.100/24 ...". inet6 lines
  # do not match because a digit must follow "inet ".
  if [[ "$line" =~ $inet_re ]]; then
    read -r _ ip_cidr _ <<< "$line"   # second whitespace field is ADDR/PREFIX
    found_ip=${ip_cidr%%/*}
    found_mask=${ip_cidr#*/}
  fi
done

# Flush the last interface in the input.
flush_iface
|
||||
'''
|
||||
|
||||
[[test_cases]]
|
||||
description = "Two interfaces with IPs"
|
||||
stdin = """1: lo: <LOOPBACK,UP> mtu 65536
|
||||
inet 127.0.0.1/8 scope host lo
|
||||
2: eth0: <BROADCAST,MULTICAST,UP> mtu 1500
|
||||
inet 192.168.1.100/24 brd 192.168.1.255 scope global eth0
|
||||
3: wlan0: <BROADCAST,MULTICAST,UP> mtu 1500
|
||||
inet 10.0.0.42/16 brd 10.0.255.255 scope global wlan0"""
|
||||
expected_stdout = """IFACE: eth0 IP: 192.168.1.100 MASK: /24
|
||||
IFACE: wlan0 IP: 10.0.0.42 MASK: /16"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Interface with no IP"
|
||||
stdin = """1: lo: <LOOPBACK,UP> mtu 65536
|
||||
inet 127.0.0.1/8 scope host lo
|
||||
2: eth0: <BROADCAST,MULTICAST,UP> mtu 1500
|
||||
3: docker0: <NO-CARRIER,BROADCAST,MULTICAST,UP> mtu 1500
|
||||
inet 172.17.0.1/16 brd 172.17.255.255 scope global docker0"""
|
||||
expected_stdout = """IFACE: eth0 IP: none MASK: none
|
||||
IFACE: docker0 IP: 172.17.0.1 MASK: /16"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Single interface"
|
||||
stdin = """1: lo: <LOOPBACK,UP> mtu 65536
|
||||
inet 127.0.0.1/8 scope host lo
|
||||
2: enp3s0: <BROADCAST,MULTICAST,UP> mtu 9000
|
||||
inet 10.10.10.5/8 brd 10.255.255.255 scope global enp3s0"""
|
||||
expected_stdout = "IFACE: enp3s0 IP: 10.10.10.5 MASK: /8"
|
||||
79
tasks/pipeline/url_normalizer.toml
Normal file
79
tasks/pipeline/url_normalizer.toml
Normal file
@@ -0,0 +1,79 @@
|
||||
name = "url_normalizer"
|
||||
category = "pipeline"
|
||||
mode = "convert"
|
||||
description = """
|
||||
Read URLs from stdin, one per line. Normalize each URL:
|
||||
1. If the URL already starts with "https://", keep it as-is.
|
||||
2. If it starts with "http://", keep it as-is.
|
||||
3. Otherwise, prepend "http://" to it.
|
||||
4. After normalization, validate that the URL matches a basic pattern:
|
||||
it must have a protocol (http:// or https://), followed by at least
|
||||
one character, a dot, and at least one more character for the domain.
|
||||
5. Output the normalized URL, or "INVALID: <original>" for invalid entries.
|
||||
|
||||
Skip empty lines silently.
|
||||
"""
|
||||
|
||||
bash_source = '''
|
||||
#!/bin/bash
# Normalize URLs from stdin: prepend "http://" when no protocol is present,
# then validate the result; print the normalized URL, or "INVALID: <original>"
# for entries that fail validation. Empty lines are skipped silently.

# Basic shape check: protocol, then a host containing a dot before any "/".
url_re='^https?://[^/]+\.[^/]+'

while IFS= read -r line || [[ -n "$line" ]]; do
  # Skip empty lines
  [[ -z "$line" ]] && continue

  # Trim surrounding whitespace with parameter expansion (no sed fork).
  url=${line#"${line%%[![:space:]]*}"}   # strip leading whitespace
  url=${url%"${url##*[![:space:]]}"}     # strip trailing whitespace
  [[ -z "$url" ]] && continue

  original="$url"

  # Keep an existing http:// or https:// prefix, otherwise default to
  # http://. Glob matches replace the per-line `cut -c1-N` forks of the
  # original (identical results, including for strings shorter than the
  # prefix).
  if [[ "$url" == https://* || "$url" == http://* ]]; then
    normalized="$url"
  else
    normalized="http://$url"
  fi

  if [[ "$normalized" =~ $url_re ]]; then
    echo "$normalized"
  else
    echo "INVALID: $original"
  fi
done
|
||||
'''
|
||||
|
||||
[[test_cases]]
|
||||
description = "URLs with and without protocol"
|
||||
stdin = """example.com
|
||||
http://example.com
|
||||
https://example.com
|
||||
www.google.com/search?q=test"""
|
||||
expected_stdout = """http://example.com
|
||||
http://example.com
|
||||
https://example.com
|
||||
http://www.google.com/search?q=test"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Invalid entries"
|
||||
stdin = """notaurl
|
||||
https://valid.example.com
|
||||
just-a-word"""
|
||||
expected_stdout = """INVALID: notaurl
|
||||
https://valid.example.com
|
||||
INVALID: just-a-word"""
|
||||
|
||||
[[test_cases]]
|
||||
description = "Mixed valid and empty lines"
|
||||
stdin = """https://secure.site.org/path
|
||||
|
||||
api.service.io:8080
|
||||
http://old.site.net"""
|
||||
expected_stdout = """https://secure.site.org/path
|
||||
http://api.service.io:8080
|
||||
http://old.site.net"""
|
||||
Reference in New Issue
Block a user