20 changes: 19 additions & 1 deletion .ruff.toml
@@ -29,6 +29,9 @@ ignore = [
"ANN002",
"ANN003",
"ANN401",
"TRY003",
"G004",
"TRY201",
]

select = [
@@ -57,6 +60,20 @@ select = [
"TD", # flake8-todos (check TODO format - Google Style §3.7)
"TCH",# flake8-type-checking (helps manage TYPE_CHECKING blocks and imports)
"PYI",# flake8-pyi (best practices for .pyi stub files, some rules are useful for .py too)
"S", # flake8-bandit (security issues)
"DTZ",# flake8-datetimez (timezone-aware datetimes)
"ERA",# flake8-eradicate (commented-out code)
"Q", # flake8-quotes (quote style consistency)
"RSE",# flake8-raise (modern raise statements)
"TRY",# tryceratops (exception handling best practices)
"PERF",# perflint (performance anti-patterns)
"BLE",
"T10",
"ICN",
"G",
"FIX",
"ASYNC",
"INP",
]

exclude = [
@@ -104,7 +121,7 @@ ignore-decorators = ["typing.overload", "abc.abstractmethod"]

[lint.flake8-annotations]
mypy-init-return = true
allow-star-arg-any = true
allow-star-arg-any = false

[lint.pep8-naming]
ignore-names = ["test_*", "setUp", "tearDown", "mock_*"]
@@ -139,6 +156,7 @@ inline-quotes = "single"
"types.py" = ["D", "E501"] # Ignore docstring and annotation issues in types.py
"proto_utils.py" = ["D102", "PLR0911"]
"helpers.py" = ["ANN001", "ANN201", "ANN202"]
"scripts/*.py" = ["INP001"]

[format]
exclude = [
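For orientation, here is a minimal sketch (not taken from this PR) of patterns the newly selected rule families are intended to catch, assuming the standard meanings of Ruff's DTZ005 (naive datetimes) and BLE001 (blind `except`) checks; note that G004, TRY003 and TRY201 are added to `ignore`, so f-string logging, long exception messages and `raise e` re-raises remain permitted:

```python
import datetime
import logging
import os

logger = logging.getLogger(__name__)


def utc_deadline() -> datetime.datetime:
    # DTZ005: datetime.now() without a tz argument is flagged; pass a timezone explicitly.
    return datetime.datetime.now(tz=datetime.timezone.utc)


def best_effort_cleanup(path: str) -> None:
    try:
        os.remove(path)
    except Exception:  # noqa: BLE001
        # BLE001 flags blind `except Exception`; where a catch-all is intentional
        # (top-level script error handling), it is suppressed with a noqa, as in
        # grpc_gen_post_processor.py and error_handlers.py later in this PR.
        logger.warning('Could not remove %s', path)
```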
96 changes: 50 additions & 46 deletions scripts/format.sh
@@ -8,24 +8,23 @@ FORMAT_ALL=false
RUFF_UNSAFE_FIXES_FLAG=""

# Process command-line arguments
# We use a while loop with shift to process each argument
while [[ "$#" -gt 0 ]]; do
case "$1" in
--all)
FORMAT_ALL=true
echo "Detected --all flag: Formatting all Python files."
shift # Consume the argument
;;
--unsafe-fixes)
RUFF_UNSAFE_FIXES_FLAG="--unsafe-fixes"
echo "Detected --unsafe-fixes flag: Ruff will run with unsafe fixes."
shift # Consume the argument
;;
*)
# Handle unknown arguments or just ignore them if we only care about specific ones
echo "Warning: Unknown argument '$1'. Ignoring."
shift # Consume the argument
;;
--all)
FORMAT_ALL=true
echo "Detected --all flag: Formatting all tracked Python files."
shift # Consume the argument
;;
--unsafe-fixes)
RUFF_UNSAFE_FIXES_FLAG="--unsafe-fixes"
echo "Detected --unsafe-fixes flag: Ruff will run with unsafe fixes."
shift # Consume the argument
;;
*)
# Handle unknown arguments or just ignore them
echo "Warning: Unknown argument '$1'. Ignoring."
shift # Consume the argument
;;
esac
done

@@ -39,47 +38,52 @@ fi
CHANGED_FILES=""

if $FORMAT_ALL; then
echo "Formatting all Python files in the repository."
# Find all Python files, excluding grpc generated files as per original logic.
# `sort -u` ensures unique files and consistent ordering for display/xargs.
CHANGED_FILES=$(find . -name '*.py' -not -path './src/a2a/grpc/*' | sort -u)

if [ -z "$CHANGED_FILES" ]; then
echo "No Python files found to format."
exit 0
fi
echo "Finding all tracked Python files in the repository..."
CHANGED_FILES=$(git ls-files -- '*.py' ':!src/a2a/grpc/*')
else
echo "No '--all' flag found. Formatting changed Python files based on git diff."
echo "Finding changed Python files based on git diff..."
TARGET_BRANCH="origin/${GITHUB_BASE_REF:-main}"
git fetch origin "${GITHUB_BASE_REF:-main}" --depth=1

MERGE_BASE=$(git merge-base HEAD "$TARGET_BRANCH")

# Get python files changed in this PR, excluding grpc generated files
# Get python files changed in this PR, excluding grpc generated files.
CHANGED_FILES=$(git diff --name-only --diff-filter=ACMRTUXB "$MERGE_BASE" HEAD -- '*.py' ':!src/a2a/grpc/*')

if [ -z "$CHANGED_FILES" ]; then
echo "No changed Python files to format."
exit 0
fi
fi

echo "Files to be formatted:"
echo "$CHANGED_FILES"
# Exit if no files were found
if [ -z "$CHANGED_FILES" ]; then
echo "No changed or tracked Python files to format."
exit 0
fi

# Helper function to run formatters with the list of files.
# The list of files is passed to xargs via stdin.
# --- Helper Function ---
# Runs a command on a list of files passed via stdin.
# $1: A string containing the list of files (space-separated).
# $2...: The command and its arguments to run.
run_formatter() {
echo "$CHANGED_FILES" | xargs -r "$@"
local files_to_format="$1"
shift # Remove the file list from the arguments
if [ -n "$files_to_format" ]; then
echo "$files_to_format" | xargs -r "$@"
fi
}

echo "Running pyupgrade..."
run_formatter pyupgrade --exit-zero-even-if-changed --py310-plus
echo "Running autoflake..."
run_formatter autoflake -i -r --remove-all-unused-imports
echo "Running ruff check (fix-only)..."
run_formatter ruff check --fix $RUFF_UNSAFE_FIXES_FLAG
echo "Running ruff format..."
run_formatter ruff format
# --- Python File Formatting ---
if [ -n "$CHANGED_FILES" ]; then
echo "--- Formatting Python Files ---"
echo "Files to be formatted:"
echo "$CHANGED_FILES"

echo "Running autoflake..."
run_formatter "$CHANGED_FILES" autoflake -i -r --remove-all-unused-imports
echo "Running ruff check (fix-only)..."
run_formatter "$CHANGED_FILES" ruff check --fix-only $RUFF_UNSAFE_FIXES_FLAG
echo "Running ruff format..."
run_formatter "$CHANGED_FILES" ruff format
echo "Python formatting complete."
else
echo "No Python files to format."
fi

echo "Formatting complete."
echo "All formatting tasks are complete."
2 changes: 1 addition & 1 deletion scripts/grpc_gen_post_processor.py
@@ -47,7 +47,7 @@ def process_generated_code(src_folder: str = 'src/a2a/grpc') -> None:
else:
print('No changes needed')

except Exception as e:
except Exception as e: # noqa: BLE001
print(f'Error processing file {file}: {e}')
sys.exit(1)

Empty file.
3 changes: 1 addition & 2 deletions src/a2a/server/apps/jsonrpc/jsonrpc_app.py
@@ -317,8 +317,7 @@ async def _handle_requests(self, request: Request) -> Response: # noqa: PLR0911
)
raise e
except Exception as e:
logger.error(f'Unhandled exception: {e}')
traceback.print_exc()
logger.exception('Unhandled exception')
return self._generate_error_response(
request_id, A2AError(root=InternalError(message=str(e)))
)
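For context on the repeated `logger.error(f'... {e}')` to `logger.exception(...)` swaps in this PR: inside an `except` block, `Logger.exception` logs at ERROR level with the active traceback attached, so the separate `traceback.print_exc()` call becomes redundant. A small illustrative sketch (the function name is hypothetical):

```python
import logging

logger = logging.getLogger(__name__)


def parse_request_id(raw: str) -> int | None:
    try:
        return int(raw)
    except ValueError:
        # Equivalent to logger.error(..., exc_info=True): the message and the
        # full traceback are logged together, no manual traceback.print_exc() needed.
        logger.exception('Unhandled exception while parsing request id %r', raw)
        return None
```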
8 changes: 3 additions & 5 deletions src/a2a/server/events/event_consumer.py
@@ -140,13 +140,11 @@ async def consume_all(self) -> AsyncGenerator[Event]:
# python 3.12 and get a queue empty error on an open queue
if self.queue.is_closed():
break
except ValidationError as e:
logger.error(f'Invalid event format received: {e}')
except ValidationError:
logger.exception('Invalid event format received')
continue
except Exception as e:
logger.error(
f'Stopping event consumption due to exception: {e}'
)
logger.exception('Stopping event consumption due to exception')
self._exception = e
continue

5 changes: 3 additions & 2 deletions src/a2a/server/events/event_queue.py
@@ -147,8 +147,9 @@ async def close(self) -> None:
# Otherwise, join the queue
else:
tasks = [asyncio.create_task(self.queue.join())]
for child in self._children:
tasks.append(asyncio.create_task(child.close()))
tasks.extend(
asyncio.create_task(child.close()) for child in self._children
)
await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED)

def is_closed(self) -> bool:
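The loop-and-`append` over `self._children` becomes a single `list.extend` over a generator expression, the shape Ruff's PERF401 check prefers; behaviour is unchanged. A self-contained sketch of the same pattern, with a hypothetical `Child` class standing in for child queues:

```python
import asyncio


class Child:
    """Hypothetical stand-in for a child event queue."""

    def __init__(self, name: str) -> None:
        self.name = name

    async def close(self) -> None:
        await asyncio.sleep(0)  # pretend to flush and close
        print(f'closed {self.name}')


async def close_all(children: list[Child]) -> None:
    tasks = [asyncio.create_task(asyncio.sleep(0))]  # stands in for queue.join()
    # Equivalent to appending inside a for-loop, but expressed as one extend()
    # over a generator expression, which PERF401 prefers.
    tasks.extend(asyncio.create_task(child.close()) for child in children)
    await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED)


if __name__ == '__main__':
    asyncio.run(close_all([Child('a'), Child('b')]))
```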
4 changes: 2 additions & 2 deletions src/a2a/server/events/in_memory_queue_manager.py
@@ -34,7 +34,7 @@ async def add(self, task_id: str, queue: EventQueue) -> None:
"""
async with self._lock:
if task_id in self._task_queue:
raise TaskQueueExists()
raise TaskQueueExists
self._task_queue[task_id] = queue

async def get(self, task_id: str) -> EventQueue | None:
@@ -67,7 +67,7 @@ async def close(self, task_id: str) -> None:
"""
async with self._lock:
if task_id not in self._task_queue:
raise NoTaskQueue()
raise NoTaskQueue
queue = self._task_queue.pop(task_id)
await queue.close()

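`raise TaskQueueExists` and `raise TaskQueueExists()` are equivalent: raising a class instantiates it with no arguments, which is why Ruff's RSE102 check asks for the empty parentheses to be dropped. A tiny illustration with a stand-in exception and registry:

```python
class QueueExistsDemo(Exception):
    """Stand-in for the real TaskQueueExists error."""


def register_queue(registry: dict[str, object], task_id: str) -> None:
    if task_id in registry:
        # RSE102: no empty parentheses needed; Python calls the class for you.
        raise QueueExistsDemo
    registry[task_id] = object()
```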
20 changes: 8 additions & 12 deletions src/a2a/server/request_handlers/default_request_handler.py
@@ -98,7 +98,7 @@
should_populate_referred_tasks=False, task_store=self.task_store
)
)
# TODO: Likely want an interface for managing this, like AgentExecutionManager.

Check failure (GitHub Actions / Lint Code Base): Ruff (FIX002) at src/a2a/server/request_handlers/default_request_handler.py:101:11: Line contains TODO, consider resolving the issue
self._running_agents = {}
self._running_agents_lock = asyncio.Lock()

@@ -232,7 +232,7 @@

queue = await self._queue_manager.create_or_tap(task_id)
result_aggregator = ResultAggregator(task_manager)
# TODO: to manage the non-blocking flows.

Check failure (GitHub Actions / Lint Code Base): Ruff (FIX002) at src/a2a/server/request_handlers/default_request_handler.py:235:11: Line contains TODO, consider resolving the issue
producer_task = asyncio.create_task(
self._run_event_stream(request_context, queue)
)
@@ -293,7 +293,7 @@
consumer, blocking=blocking
)
if not result:
raise ServerError(error=InternalError())

Check failure (GitHub Actions / Lint Code Base): Ruff (TRY301) at src/a2a/server/request_handlers/default_request_handler.py:296:17: Abstract `raise` to an inner function

if isinstance(result, Task):
self._validate_task_id_match(task_id, result.id)
@@ -302,12 +302,12 @@
task_id, result_aggregator
)

except Exception as e:
logger.error(f'Agent execution failed. Error: {e}')
except Exception:
logger.exception('Agent execution failed')
raise
finally:
if interrupted_or_non_blocking:
# TODO: Track this disconnected cleanup task.

Check failure (GitHub Actions / Lint Code Base): Ruff (FIX002) at src/a2a/server/request_handlers/default_request_handler.py:310:19: Line contains TODO, consider resolving the issue
asyncio.create_task( # noqa: RUF006
self._cleanup_producer(producer_task, task_id)
)
@@ -478,16 +478,12 @@
params.id
)

task_push_notification_config = []
if push_notification_config_list:
for config in push_notification_config_list:
task_push_notification_config.append(
TaskPushNotificationConfig(
task_id=params.id, push_notification_config=config
)
)

return task_push_notification_config
return [
TaskPushNotificationConfig(
task_id=params.id, push_notification_config=config
)
for config in push_notification_config_list
]

async def on_delete_task_push_notification_config(
self,
8 changes: 4 additions & 4 deletions src/a2a/server/tasks/base_push_notification_sender.py
@@ -64,9 +64,9 @@ async def _dispatch_notification(
logger.info(
f'Push-notification sent for task_id={task.id} to URL: {url}'
)
return True
except Exception as e:
logger.error(
f'Error sending push-notification for task_id={task.id} to URL: {url}. Error: {e}'
except Exception:
logger.exception(
f'Error sending push-notification for task_id={task.id} to URL: {url}.'
)
return False
return True
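Moving `return True` out of the `try` body is likely motivated by tryceratops' TRY300 check: only the code that can raise stays inside the `try`, and the success return sits after the handler. A hedged sketch of the resulting control flow, with a hypothetical `send()` helper in place of the real HTTP client call:

```python
import logging

logger = logging.getLogger(__name__)


async def send(url: str) -> None:
    """Hypothetical network call that may raise."""


async def dispatch(url: str) -> bool:
    try:
        await send(url)
        logger.info('Push-notification sent to URL: %s', url)
    except Exception:
        # TRY400: logger.exception records the traceback automatically.
        logger.exception('Error sending push-notification to URL: %s', url)
        return False
    return True  # Success path lives outside the try block (TRY300).
```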
@@ -175,7 +175,7 @@ def _from_orm(
decrypted_payload
)
except (json.JSONDecodeError, ValidationError) as e:
logger.error(
logger.exception(
'Failed to parse decrypted push notification config for task %s, config %s. '
'Data is corrupted or not valid JSON after decryption.',
model_instance.task_id,
@@ -201,7 +201,7 @@ def _from_orm(
return PushNotificationConfig.model_validate_json(payload)
except (json.JSONDecodeError, ValidationError) as e:
if self._fernet:
logger.error(
logger.exception(
'Failed to parse push notification config for task %s, config %s. '
'Decryption failed and the data is not valid JSON. '
'This likely indicates the data is corrupted or encrypted with a different key.',
Expand All @@ -210,7 +210,7 @@ def _from_orm(
)
else:
# if no key is configured and the payload is not valid JSON.
logger.error(
logger.exception(
'Failed to parse push notification config for task %s, config %s. '
'Data is not valid JSON and no encryption key is configured.',
model_instance.task_id,
@@ -252,12 +252,11 @@ async def get_info(self, task_id: str) -> list[PushNotificationConfig]:
for model in models:
try:
configs.append(self._from_orm(model))
except ValueError as e:
logger.error(
'Could not deserialize push notification config for task %s, config %s: %s',
except ValueError:
logger.exception(
'Could not deserialize push notification config for task %s, config %s',
model.task_id,
model.config_id,
e,
)
return configs

2 changes: 1 addition & 1 deletion src/a2a/utils/error_handlers.py
@@ -80,7 +80,7 @@ async def wrapper(*args: Any, **kwargs: Any) -> Response:
return JSONResponse(
content={'message': error.message}, status_code=http_code
)
except Exception as e:
except Exception as e: # noqa: BLE001
logger.log(logging.ERROR, f'Unknown error occurred {e}')
return JSONResponse(
content={'message': 'unknown exception'}, status_code=500
21 changes: 8 additions & 13 deletions src/a2a/utils/proto_utils.py
@@ -338,16 +338,12 @@ def security(
) -> list[a2a_pb2.Security] | None:
if not security:
return None
rval: list[a2a_pb2.Security] = []
for s in security:
rval.append(
a2a_pb2.Security(
schemes={
k: a2a_pb2.StringList(list=v) for (k, v) in s.items()
}
)
return [
a2a_pb2.Security(
schemes={k: a2a_pb2.StringList(list=v) for (k, v) in s.items()}
)
return rval
for s in security
]

@classmethod
def security_schemes(
@@ -774,10 +770,9 @@ def security(
) -> list[dict[str, list[str]]] | None:
if not security:
return None
rval: list[dict[str, list[str]]] = []
for s in security:
rval.append({k: list(v.list) for (k, v) in s.schemes.items()})
return rval
return [
{k: list(v.list) for (k, v) in s.schemes.items()} for s in security
]

@classmethod
def provider(