7 changes: 7 additions & 0 deletions .chronus/changes/python-fix-docstring-2025-10-4-15-31-14.md
@@ -0,0 +1,7 @@
---
changeKind: fix
packages:
- "@typespec/http-client-python"
---

Fix bad indentation in generated docstrings
25 changes: 22 additions & 3 deletions packages/http-client-python/eng/scripts/ci/regenerate.ts
@@ -30,6 +30,7 @@ const PLUGIN_DIR = argv.values.pluginDir
: resolve(fileURLToPath(import.meta.url), "../../../../");
const AZURE_HTTP_SPECS = resolve(PLUGIN_DIR, "node_modules/@azure-tools/azure-http-specs/specs");
const HTTP_SPECS = resolve(PLUGIN_DIR, "node_modules/@typespec/http-specs/specs");
const LOCAL_AZURE_SPECS = resolve(PLUGIN_DIR, "generator/test/azure/specs");
const GENERATED_FOLDER = argv.values.generatedFolder
? resolve(argv.values.generatedFolder)
: resolve(PLUGIN_DIR, "generator");
@@ -278,8 +279,25 @@ function toPosix(dir: string): string {
return dir.replace(/\\/g, "/");
}

// Classify a spec path to determine its root and whether it should be treated as an Azure spec.
function classifySpec(spec: string): { specDir: string; isAzure: boolean } {
const posixSpec = toPosix(spec);
if (posixSpec.startsWith(toPosix(AZURE_HTTP_SPECS) + "/")) {
return { specDir: AZURE_HTTP_SPECS, isAzure: true };
}
if (posixSpec.startsWith(toPosix(LOCAL_AZURE_SPECS) + "/")) {
// Local azure specs (in repo) should behave like azure specs for emitter options & naming.
return { specDir: LOCAL_AZURE_SPECS, isAzure: true };
}
if (posixSpec.startsWith(toPosix(HTTP_SPECS) + "/")) {
return { specDir: HTTP_SPECS, isAzure: false };
}
// Fallback: treat as non-azure and use HTTP_SPECS for relative path to avoid '..' segments.
return { specDir: HTTP_SPECS, isAzure: false };
}

function getEmitterOption(spec: string, flavor: string): Record<string, string>[] {
const specDir = spec.includes("azure") ? AZURE_HTTP_SPECS : HTTP_SPECS;
const { specDir } = classifySpec(spec);
const relativeSpec = toPosix(relative(specDir, spec));
const key = relativeSpec.includes("resiliency/srv-driven/old.tsp")
? relativeSpec
@@ -374,7 +392,7 @@ async function getSubdirectories(baseDir: string, flags: RegenerateFlags): Promi
}

function defaultPackageName(spec: string): string {
const specDir = spec.includes("azure") ? AZURE_HTTP_SPECS : HTTP_SPECS;
const { specDir } = classifySpec(spec);
return toPosix(relative(specDir, dirname(spec)))
.replace(/\//g, "-")
.toLowerCase();
@@ -480,11 +498,12 @@ async function regenerate(flags: RegenerateFlagsInput): Promise<void> {
await preprocess(flags);

const flagsResolved = { debug: false, flavor: flags.flavor, ...flags };
const subdirectoriesForLocalAzure = await getSubdirectories(LOCAL_AZURE_SPECS, flagsResolved);
const subdirectoriesForAzure = await getSubdirectories(AZURE_HTTP_SPECS, flagsResolved);
const subdirectoriesForNonAzure = await getSubdirectories(HTTP_SPECS, flagsResolved);
const subdirectories =
flags.flavor === "azure"
? [...subdirectoriesForAzure, ...subdirectoriesForNonAzure]
? [...subdirectoriesForLocalAzure, ...subdirectoriesForAzure, ...subdirectoriesForNonAzure]
: subdirectoriesForNonAzure;
const cmdList: TspCommand[] = subdirectories.flatMap((subdirectory) =>
_getCmdList(subdirectory, flagsResolved),
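The change above replaces the old substring check (`spec.includes("azure")`) with prefix matching against known spec roots, so local specs under `generator/test/azure/specs` are treated as Azure specs. A rough Python sketch of the same idea — the root paths here are illustrative, the real values come from `regenerate.ts`:

```python
from pathlib import PurePosixPath

# Illustrative spec roots; the real ones are AZURE_HTTP_SPECS, LOCAL_AZURE_SPECS
# and HTTP_SPECS resolved in regenerate.ts.
ROOTS = [
    ("node_modules/@azure-tools/azure-http-specs/specs", True),
    ("generator/test/azure/specs", True),   # local azure specs behave like azure specs
    ("node_modules/@typespec/http-specs/specs", False),
]

def classify(spec: str) -> tuple[str, bool]:
    # Pick the spec root by path prefix instead of a substring match on "azure".
    for root, is_azure in ROOTS:
        if spec.startswith(root + "/"):
            return root, is_azure
    return ROOTS[-1][0], False  # fallback: treat as non-azure

def package_name(spec: str) -> str:
    # Package name = the spec's directory relative to its root,
    # with slashes turned into dashes (mirrors defaultPackageName above).
    root, _ = classify(spec)
    rel = PurePosixPath(spec).parent.relative_to(root)
    return str(rel).replace("/", "-").lower()

# A local azure spec (hypothetical path) maps to the "docstring" package:
print(package_name("generator/test/azure/specs/docstring/main.tsp"))  # -> docstring
```

That derived name is also why `-e ./generated/docstring` shows up in the requirements file further down.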
@@ -5,21 +5,28 @@
{% set enable_custom_handling = "\n* " in doc_string or doc_string.startswith("* ") %}
{%- if enable_custom_handling -%}
{%- set lines = doc_string.split('\n') -%}
{%- set base_indent = wrap_string.lstrip('\n') -%}
{%- set result_lines = [] -%}
{%- for line in lines -%}
{%- if line.startswith('* ') -%}
{# Handle bullet points with proper continuation alignment #}
{%- set bullet_content = line[2:] -%}
{%- set base_indent = wrap_string.lstrip('\n') -%}
{%- set bullet_line = base_indent + ' * ' + bullet_content -%}
{%- set continuation_spaces = base_indent + ' ' -%}
{%- set wrapped = bullet_line | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring='\n' + continuation_spaces) -%}
{%- set _ = result_lines.append(wrapped) -%}
{%- elif line.strip() -%}
{%- set wrapped = line.strip() | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring=wrap_string) -%}
{%- set _ = result_lines.append(wrapped) -%}
{%- set line_indent = '' if line.strip().startswith(':') or loop.index == 1 else (base_indent + ' ') -%}
{%- set wrapped = (line_indent + line) | wordwrap(width=95, break_long_words=False, break_on_hyphens=False, wrapstring=wrap_string) -%}
{%- for line in wrapped.split('\n') -%}
{%- set prefix = "" if loop.index == 1 else " " -%}
{%- set _ = result_lines.append(prefix + line) -%}
{%- endfor -%}
{%- else -%}
{%- set _ = result_lines.append('') -%}
{# Do not add continuous blank lines #}
{%- if (result_lines and result_lines[-1] != '') or not result_lines -%}
{%- set _ = result_lines.append('') -%}
{%- endif -%}
{%- endif -%}
{%- endfor -%}
{%- set original_result = result_lines | join('\n') -%}
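The template logic above is dense; as a rough Python illustration of the bullet-handling idea (not the emitter's actual code), a bullet keeps its `* ` marker and any wrapped continuation lines are indented a little further so they align under the bullet text. The 95-character width matches the template; the indent strings are made up for the example:

```python
import textwrap

def wrap_bullet(line: str, base_indent: str, width: int = 95) -> str:
    # Rebuild the bullet with its surrounding indent, then wrap it so that
    # continuation lines land under the bullet text rather than under "*".
    bullet_line = base_indent + " * " + line[2:]   # drop the original "* " prefix
    continuation = base_indent + "   "             # extra indent for wrapped lines
    return textwrap.fill(
        bullet_line,
        width=width,
        break_long_words=False,
        break_on_hyphens=False,
        subsequent_indent=continuation,
    )

print(wrap_bullet(
    "* If set to 'default', then the request will be processed with the standard "
    "pricing and performance for the selected model.",
    "    ",
))
```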
@@ -41,6 +41,7 @@ azure-mgmt-core==1.6.0
-e ./generated/client-structure-multiclient
-e ./generated/client-structure-renamedoperation
-e ./generated/client-structure-twooperationgroup
-e ./generated/docstring
-e ./generated/resiliency-srv-driven1
-e ./generated/resiliency-srv-driven2

@@ -0,0 +1,38 @@
import "@typespec/http";

using Http;

@service(#{ title: "Doc Service" })
namespace Azure.DocString;

model DocModel {
@doc("""
An array of tools the model may call while generating a response.
You can specify which tool to use by setting the `tool_choice` parameter.

The two categories of tools you can provide the model are:

- **Built-in tools**: Tools that are provided by OpenAI that extend the model's capabilities, like file search.
- **Function calls (custom tools)**: Functions that are defined by you, enabling the model to call your own code.
""")
doc1: string;

@doc("""
Specifies the processing type used for serving the request.
* If set to 'auto', then the request will be processed with the service tier configured in the Project settings. Unless otherwise configured, the Project will use 'default'.
* If set to 'default', then the request will be processed with the standard pricing and performance for the selected model.
* If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or 'priority', then the request will be processed with the corresponding service tier. [Contact sales](https://openai.com/contact-sales) to learn more about Priority processing.
* When not set, the default behavior is 'auto'.

When the `service_tier` parameter is set, the response body will include the `service_tier`
value based on the processing mode actually used to serve the request. This response value
may be different from the value set in the parameter.
""")
doc2: string;
}

@route("/docstring")
interface DocString {
/** Get doc */
@get get(): DocModel;
}
@@ -0,0 +1,47 @@
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import hashlib
from pathlib import Path

_FILE_PATH = Path(__file__)


def string_to_hash_id(input_string):
"""
Converts a string to a SHA256 hash ID.

Args:
input_string (str): The string to be hashed.

Returns:
str: The hexadecimal representation of the SHA256 hash.
"""
# Encode the string to bytes, which is required by hashlib
encoded_string = input_string.encode("utf-8")

# Create a SHA256 hash object
hasher = hashlib.sha256()

# Update the hash object with the encoded string
hasher.update(encoded_string)

# Get the hexadecimal representation of the hash
hash_id = hasher.hexdigest()

return hash_id


def test_docstring_generation():

with open(
_FILE_PATH.parent.parent / "azure/generated/docstring/azure/docstring/models/_models.py", "r", encoding="utf-8"
) as f:
content = f.read()
hash_id = string_to_hash_id(content)

# We expect no changes across regenerations, so we can be sure the generated docstring is stable.
# If there are intentional changes to the docstring generation logic, update the expected hash value accordingly.
assert hash_id == "fe6f89d00143221dcedfb4ce69440600099662aa0ecf933ab463c4d2518ba3d0"
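Because the assertion pins the generated file byte-for-byte, the constant has to be refreshed whenever docstring generation changes on purpose. A minimal sketch of recomputing it, assuming the same relative path used in the test:

```python
import hashlib
from pathlib import Path

def print_expected_hash(models_file: Path) -> None:
    # Recompute the SHA256 of the regenerated models file and print it so the
    # expected value in test_docstring_generation can be updated.
    content = models_file.read_text(encoding="utf-8")
    print(hashlib.sha256(content.encode("utf-8")).hexdigest())

# Hypothetical invocation; adjust the path to your checkout layout:
# print_expected_hash(Path("azure/generated/docstring/azure/docstring/models/_models.py"))
```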