2 changes: 1 addition & 1 deletion README.md
@@ -7,7 +7,7 @@ These images come in two variants, CPU and GPU, and include deep learning framew
Keras; popular Python packages like numpy, scikit-learn and pandas; and IDEs like Jupyter Lab. The distribution contains
the _latest_ versions of all these packages _such that_ they are _mutually compatible_.

Starting with v2.9.5+, the images include Amazon Q Agentic Chat integration for enhanced AI-powered development assistance in JupyterLab.
Starting with v2.9.5+ and v3.5+, the images include Amazon Q Agentic Chat integration for enhanced AI-powered development assistance in JupyterLab.

### Amazon Q Agentic Chat Integration

44 changes: 24 additions & 20 deletions assets/extract_amazon_q_agentic_chat_urls.py
@@ -4,7 +4,8 @@
import json
import sys

def extract_urls(manifest_file, version, platform='linux', arch='x64'):

def extract_urls(manifest_file, version, platform="linux", arch="x64"):
"""Extract servers.zip and clients.zip URLs for specified platform/arch."""
try:
with open(manifest_file) as f:
@@ -13,34 +14,37 @@ def extract_urls(manifest_file, version, platform='linux', arch='x64'):
raise FileNotFoundError(f"Manifest file not found: {manifest_file}")
except json.JSONDecodeError as e:
raise ValueError(f"Invalid JSON in manifest file {manifest_file}: {str(e)}")
for ver in manifest['versions']:
if ver['serverVersion'] == version:
for target in ver['targets']:
if target['platform'] == platform and target.get('arch') == arch:

for ver in manifest["versions"]:
if ver["serverVersion"] == version:
for target in ver["targets"]:
if target["platform"] == platform and target.get("arch") == arch:
servers_url = None
clients_url = None
for content in target['contents']:
if content['filename'] == 'servers.zip':
servers_url = content['url']
elif content['filename'] == 'clients.zip':
clients_url = content['url']

for content in target["contents"]:
if content["filename"] == "servers.zip":
servers_url = content["url"]
elif content["filename"] == "clients.zip":
clients_url = content["url"]

if servers_url is None or clients_url is None:
raise ValueError(f"Required files (servers.zip/clients.zip) not found for version {version} {platform} {arch}")

raise ValueError(
f"Required files (servers.zip/clients.zip) not found for version {version} {platform} {arch}"
)

return servers_url, clients_url

raise ValueError(f"Version {version} not found for {platform} {arch}")

if __name__ == '__main__':

if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: extract_amazon_q_agentic_chat_urls.py <manifest_file> <version>")
sys.exit(1)

manifest_file, version = sys.argv[1], sys.argv[2]
servers_url, clients_url = extract_urls(manifest_file, version)

print(f"SERVERS_URL={servers_url}")
print(f"CLIENTS_URL={clients_url}")
print(f"CLIENTS_URL={clients_url}")
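For reference, a minimal sketch of how the reformatted helper is exercised, assuming a manifest laid out the way the tests below construct it; the manifest.json path and the 1.0.0 version string are illustrative, not taken from this PR:

# Illustrative only: builds a tiny manifest in the shape the tests below use,
# then calls extract_urls() the same way the script's __main__ block does.
import json
import sys

sys.path.append("assets")  # assumption: run from the repo root so the module is importable
from extract_amazon_q_agentic_chat_urls import extract_urls

manifest = {
    "versions": [
        {
            "serverVersion": "1.0.0",  # hypothetical version string
            "targets": [
                {
                    "platform": "linux",
                    "arch": "x64",
                    "contents": [
                        {"filename": "servers.zip", "url": "https://example.com/servers.zip"},
                        {"filename": "clients.zip", "url": "https://example.com/clients.zip"},
                    ],
                }
            ],
        }
    ]
}

with open("manifest.json", "w") as f:  # hypothetical path
    json.dump(manifest, f)

servers_url, clients_url = extract_urls("manifest.json", "1.0.0")
print(f"SERVERS_URL={servers_url}")
print(f"CLIENTS_URL={clients_url}")

Run as a script (python extract_amazon_q_agentic_chat_urls.py manifest.json 1.0.0), the __main__ block prints the same two KEY=value lines.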
4 changes: 2 additions & 2 deletions src/main.py
@@ -135,7 +135,7 @@ def _copy_static_files(base_version_dir, new_version_dir, new_version_major, run
q_extract_script_path = os.path.relpath(f"assets/extract_amazon_q_agentic_chat_urls.py")
if os.path.exists(q_extract_script_path):
shutil.copy2(q_extract_script_path, new_version_dir)

q_download_script_path = os.path.relpath(f"assets/download_amazon_q_agentic_chat_artifacts.sh")
if os.path.exists(q_download_script_path):
shutil.copy2(q_download_script_path, new_version_dir)
@@ -277,7 +277,7 @@ def _build_local_images(
# Minimal patch build, use .patch Dockerfiles
dockerfile = f"./Dockerfile-{image_type}.patch"
else:
dockerfile="./Dockerfile"
dockerfile = "./Dockerfile"
try:
image, log_gen = _docker_client.images.build(
path=target_version_dir, dockerfile=dockerfile, rm=True, pull=True, buildargs=config["build_args"]
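Both src/main.py hunks are cosmetic, but for context, a minimal standalone sketch of the patch-versus-full build selection the second hunk sits in, assuming the Docker SDK for Python (the docker package); target_version_dir, image_type and build_args are illustrative placeholders, not values from this PR:

# Sketch of the Dockerfile selection pattern around the reformatted assignment.
import docker

_docker_client = docker.from_env()


def build_image(target_version_dir, image_type, build_args, patch_build=False):
    if patch_build:
        # Minimal patch build, use .patch Dockerfiles
        dockerfile = f"./Dockerfile-{image_type}.patch"
    else:
        dockerfile = "./Dockerfile"
    # images.build returns the built image plus a generator over the build log
    image, log_gen = _docker_client.images.build(
        path=target_version_dir, dockerfile=dockerfile, rm=True, pull=True, buildargs=build_args
    )
    return image, log_gen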
16 changes: 12 additions & 4 deletions src/package_report.py
@@ -40,8 +40,12 @@ def _get_package_versions_in_upstream(target_packages_match_spec_out, target_ver
channel = match_spec_out.get("channel").channel_name
subdir_filter = "[subdir=" + match_spec_out.get("subdir") + "]"
try:
search_result = subprocess.run(["conda", "search", channel + "::" + package + ">=" + str(package_version) + subdir_filter, "--json"],
capture_output=True, text=True, check=True)
search_result = subprocess.run(
["conda", "search", channel + "::" + package + ">=" + str(package_version) + subdir_filter, "--json"],
capture_output=True,
text=True,
check=True,
)
# Load the result as json
package_metadata = json.loads(search_result.stdout)[package]
except (subprocess.CalledProcessError, json.JSONDecodeError, KeyError) as e:
@@ -279,8 +283,12 @@ def _generate_python_package_dependency_report(image_config, base_version_dir, t
for package, version in new_packages.items():
try:
# Pull package metadata from conda-forge and dump into json file
search_result = subprocess.run(["conda", "search", "-c", "conda-forge", f"{package}=={version}", "--json"],
capture_output=True, text=True, check=True)
search_result = subprocess.run(
["conda", "search", "-c", "conda-forge", f"{package}=={version}", "--json"],
capture_output=True,
text=True,
check=True,
)
package_metadata = json.loads(search_result.stdout)[package][0]
results[package] = {"version": package_metadata["version"], "depends": package_metadata["depends"]}
except Exception as e:
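Both package_report.py hunks reflow the same call pattern: shell out to conda search with --json and index the decoded output by package name. A minimal standalone sketch of that pattern, with a hypothetical package pin:

# Illustrative reproduction of the reformatted conda-search calls.
import json
import subprocess

package, version = "numpy", "1.26.4"  # hypothetical pin, not from this PR
search_result = subprocess.run(
    ["conda", "search", "-c", "conda-forge", f"{package}=={version}", "--json"],
    capture_output=True,
    text=True,
    check=True,
)
# conda's JSON output is a dict keyed by package name; each value is a list of matching builds.
package_metadata = json.loads(search_result.stdout)[package][0]
print(package_metadata["version"], package_metadata["depends"])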
5 changes: 3 additions & 2 deletions src/utils.py
@@ -107,8 +107,9 @@ def pull_conda_package_metadata(image_config, image_artifact_dir):
if str(match_spec_out).startswith("conda-forge"):
# Pull package metadata from conda-forge and dump into json file
try:
search_result = subprocess.run(["conda", "search", str(match_spec_out), "--json"],
capture_output=True, text=True, check=True)
search_result = subprocess.run(
["conda", "search", str(match_spec_out), "--json"], capture_output=True, text=True, check=True
)
package_metadata = json.loads(search_result.stdout)[package][0]
results[package] = {"version": package_metadata["version"], "size": package_metadata["size"]}
except (subprocess.CalledProcessError, json.JSONDecodeError, KeyError, IndexError) as e:
100 changes: 34 additions & 66 deletions test/test_amazon_q_agentic_chat_url_extraction.py
@@ -2,15 +2,16 @@

import json
import os
import pytest
import tempfile
from unittest.mock import patch

import pytest

pytestmark = pytest.mark.unit

# Import the module under test
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'assets'))

sys.path.append(os.path.join(os.path.dirname(__file__), "..", "assets"))
from extract_amazon_q_agentic_chat_urls import extract_urls


@@ -28,25 +29,19 @@ def test_extract_urls_success(self):
"platform": "linux",
"arch": "x64",
"contents": [
{
"filename": "servers.zip",
"url": "https://example.com/servers.zip"
},
{
"filename": "clients.zip",
"url": "https://example.com/clients.zip"
}
]
{"filename": "servers.zip", "url": "https://example.com/servers.zip"},
{"filename": "clients.zip", "url": "https://example.com/clients.zip"},
],
}
]
],
}
]
}
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:

with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
json.dump(manifest_data, f)
manifest_file = f.name

try:
servers_url, clients_url = extract_urls(manifest_file, "1.0.0")
assert servers_url == "https://example.com/servers.zip"
@@ -56,19 +51,12 @@ def test_extract_urls_success(self):

def test_extract_urls_version_not_found(self):
"""Test error when version is not found."""
manifest_data = {
"versions": [
{
"serverVersion": "1.0.0",
"targets": []
}
]
}

with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
manifest_data = {"versions": [{"serverVersion": "1.0.0", "targets": []}]}

with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
json.dump(manifest_data, f)
manifest_file = f.name

try:
with pytest.raises(ValueError, match="Version 2.0.0 not found for linux x64"):
extract_urls(manifest_file, "2.0.0")
@@ -79,23 +67,14 @@ def test_extract_urls_platform_not_found(self):
"""Test error when platform/arch combination is not found."""
manifest_data = {
"versions": [
{
"serverVersion": "1.0.0",
"targets": [
{
"platform": "windows",
"arch": "x64",
"contents": []
}
]
}
{"serverVersion": "1.0.0", "targets": [{"platform": "windows", "arch": "x64", "contents": []}]}
]
}
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:

with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
json.dump(manifest_data, f)
manifest_file = f.name

try:
with pytest.raises(ValueError, match="Version 1.0.0 not found for linux x64"):
extract_urls(manifest_file, "1.0.0")
@@ -112,22 +91,17 @@ def test_extract_urls_missing_files(self):
{
"platform": "linux",
"arch": "x64",
"contents": [
{
"filename": "other.zip",
"url": "https://example.com/other.zip"
}
]
"contents": [{"filename": "other.zip", "url": "https://example.com/other.zip"}],
}
]
],
}
]
}
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:

with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
json.dump(manifest_data, f)
manifest_file = f.name

try:
with pytest.raises(ValueError, match=r"Required files \(servers.zip/clients.zip\) not found"):
extract_urls(manifest_file, "1.0.0")
@@ -145,25 +119,19 @@ def test_extract_urls_custom_platform(self):
"platform": "darwin",
"arch": "arm64",
"contents": [
{
"filename": "servers.zip",
"url": "https://example.com/darwin-servers.zip"
},
{
"filename": "clients.zip",
"url": "https://example.com/darwin-clients.zip"
}
]
{"filename": "servers.zip", "url": "https://example.com/darwin-servers.zip"},
{"filename": "clients.zip", "url": "https://example.com/darwin-clients.zip"},
],
}
]
],
}
]
}
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:

with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
json.dump(manifest_data, f)
manifest_file = f.name

try:
servers_url, clients_url = extract_urls(manifest_file, "1.0.0", "darwin", "arm64")
assert servers_url == "https://example.com/darwin-servers.zip"
@@ -173,10 +141,10 @@ def test_extract_urls_custom_platform(self):

def test_extract_urls_invalid_json(self):
"""Test error handling for invalid JSON."""
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
f.write("invalid json")
manifest_file = f.name

try:
with pytest.raises(ValueError, match="Invalid JSON in manifest file"):
extract_urls(manifest_file, "1.0.0")
@@ -186,4 +154,4 @@ def test_extract_urls_invalid_json(self):
def test_extract_urls_file_not_found(self):
"""Test error handling for missing manifest file."""
with pytest.raises(FileNotFoundError):
extract_urls("nonexistent.json", "1.0.0")
extract_urls("nonexistent.json", "1.0.0")
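Side note, not part of this PR: the tests keep the NamedTemporaryFile(delete=False) pattern with manual cleanup in the collapsed try/finally blocks. A sketch of the same version-not-found case written against pytest's built-in tmp_path fixture, which handles the temporary file for you, would look roughly like this:

# Hypothetical alternative to the NamedTemporaryFile/os.unlink pattern; not part of the PR.
import json
import os
import sys

import pytest

sys.path.append(os.path.join(os.path.dirname(__file__), "..", "assets"))
from extract_amazon_q_agentic_chat_urls import extract_urls


def test_extract_urls_version_not_found_tmp_path(tmp_path):
    # tmp_path is a per-test directory that pytest cleans up on its own schedule,
    # so no try/finally or os.unlink is needed.
    manifest_file = tmp_path / "manifest.json"
    manifest_file.write_text(json.dumps({"versions": [{"serverVersion": "1.0.0", "targets": []}]}))

    with pytest.raises(ValueError, match="Version 2.0.0 not found for linux x64"):
        extract_urls(str(manifest_file), "2.0.0")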