Created
September 17, 2024 15:06
-
-
Save dnebing/30dc25c33853a425d460a51c4040f59d to your computer and use it in GitHub Desktop.
This is a full script to generate a client module after merging multiple Liferay OpenAPI Headless specs.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
#
# Merge multiple Liferay OpenAPI Headless specs into a single YAML document
# and generate a client module from it with openapi-generator (via Docker).
# See usage() for the command-line interface.

# Enable strict mode: abort on errors, unset variables, and pipeline failures.
set -euo pipefail

# Declare associative arrays (script-wide state shared between functions).
declare -A file_object_map          # input file -> human-readable object name
declare -A operationId_counts       # operationId -> occurrence count across all specs
declare -A operationId_occurrences  # operationId -> "file|method|path;" entries

# Declare indexed arrays for input files
declare -a input_files

# Variables for options (populated by parse_args)
output_file=""            # -y: path of the merged/joined YAML output
generator_type=""         # -g: openapi-generator target (e.g. "java")
client_output=""          # -o: directory the generated client is written to
additional_properties=""  # -a: optional extra generator properties
# Print the command-line help to stdout and terminate with status 1.
usage() {
  cat <<USAGE
Usage: $0 -y <output.yaml> -g <type> -o <client_output> [-a <additional_properties>] <input1.yaml> <input2.yaml> ...

Options:
 -y, --yaml <output.yaml> Specify the output YAML file for joined content.
 -g, --generator <type> Specify the generator type.
 -o, --output <client_output> Specify the client output directory.
 -a, --additional-properties <props> Specify additional properties for the generator (optional).

Example:
 $0 -y joined.yaml -g java -o client_output -a generateOsgi=true,withJava8=true file1.yaml file2.yaml
USAGE
  exit 1
}
# Parse command-line options into the script-level globals
# (output_file, generator_type, client_output, additional_properties,
# input_files), aborting via usage() on malformed, missing, or repeated
# arguments. Also rejects listing the output file among the inputs.
parse_args() {
  local seen_yaml=false
  local seen_generator=false
  local seen_output=false

  while (( $# > 0 )); do
    case "$1" in
      -y|--yaml)
        # Guard clauses: value must exist and not look like another option.
        if [[ -z "${2:-}" || "$2" =~ ^- ]]; then
          echo "Error: Argument for $1 is missing." >&2
          usage
        fi
        if [[ "$seen_yaml" == true ]]; then
          echo "Error: Multiple -y/--yaml arguments provided." >&2
          usage
        fi
        output_file="$2"
        seen_yaml=true
        shift 2
        ;;
      -g|--generator)
        if [[ -z "${2:-}" || "$2" =~ ^- ]]; then
          echo "Error: Argument for $1 is missing." >&2
          usage
        fi
        if [[ "$seen_generator" == true ]]; then
          echo "Error: Multiple -g/--generator arguments provided." >&2
          usage
        fi
        generator_type="$2"
        seen_generator=true
        shift 2
        ;;
      -o|--output)
        if [[ -z "${2:-}" || "$2" =~ ^- ]]; then
          echo "Error: Argument for $1 is missing." >&2
          usage
        fi
        if [[ "$seen_output" == true ]]; then
          echo "Error: Multiple -o/--output arguments provided." >&2
          usage
        fi
        client_output="$2"
        seen_output=true
        shift 2
        ;;
      -a|--additional-properties)
        if [[ -z "${2:-}" || "$2" =~ ^- ]]; then
          echo "Error: Argument for $1 is missing." >&2
          usage
        fi
        # A non-empty value doubles as the "already seen" flag here.
        if [[ -n "${additional_properties:-}" ]]; then
          echo "Error: Multiple -a/--additional-properties arguments provided." >&2
          usage
        fi
        additional_properties="$2"
        shift 2
        ;;
      -*)
        echo "Unknown option: $1" >&2
        usage
        ;;
      *)
        # Anything that is not an option is an input spec file.
        input_files+=("$1")
        shift
        ;;
    esac
  done

  if [[ "$seen_yaml" == false ]]; then
    echo "Error: Output YAML file not specified. Use -y or --yaml to specify the output YAML file." >&2
    usage
  fi
  if [[ "$seen_generator" == false ]]; then
    echo "Error: Generator type not specified. Use -g or --generator to specify the generator type." >&2
    usage
  fi
  if [[ "$seen_output" == false ]]; then
    echo "Error: Client output directory not specified. Use -o or --output to specify the client output directory." >&2
    usage
  fi
  if (( ${#input_files[@]} == 0 )); then
    echo "Error: No input YAML files provided." >&2
    usage
  fi

  # The joined output must never also be consumed as an input.
  local candidate
  for candidate in "${input_files[@]}"; do
    if [[ "$candidate" == "$output_file" ]]; then
      echo "Error: Output YAML file '$output_file' cannot be one of the input files." >&2
      exit 1
    fi
  done
}
# Verify that every external tool the script shells out to is installed.
# Prints per-tool installation hints and exits 1 if any are missing.
# FIX: diagnostics previously went to stdout; they now go to stderr so they
# cannot pollute captured output.
check_commands() {
  local -a required_commands=("gawk" "gsed" "yq" "npx" "docker")
  local -a missing=()
  local cmd
  for cmd in "${required_commands[@]}"; do
    # `command -v` is the portable "is it installed?" check.
    if ! command -v "$cmd" >/dev/null 2>&1; then
      missing+=("$cmd")
    fi
  done
  if [[ "${#missing[@]}" -gt 0 ]]; then
    echo "Error: The following required commands are missing:" >&2
    for cmd in "${missing[@]}"; do
      case "$cmd" in
        gawk)
          echo " - gawk: Install it using 'brew install gawk'" >&2
          ;;
        gsed)
          echo " - gsed: Install it using 'brew install gnu-sed'" >&2
          ;;
        yq)
          echo " - yq: Install it using 'brew install yq'" >&2
          ;;
        npx)
          echo " - npx: Install it by installing Node.js from 'brew install node'" >&2
          ;;
        docker)
          echo " - docker: Install it from https://docs.docker.com/get-docker/" >&2
          ;;
        *)
          echo " - $cmd" >&2
          ;;
      esac
    done
    exit 1
  fi
}
# Succeed (status 0) when the given file is the designated output file,
# i.e. it must be skipped by every per-spec processing step.
is_excluded() {
  local candidate="$1"
  case "$candidate" in
    "$output_file") return 0 ;;
    *)              return 1 ;;
  esac
}
# Walk every configured input file, skipping the output file if it was
# (indirectly) listed, and run the per-file pipeline on the rest.
process_files() {
  local yaml_file
  echo "Processing the following YAML files:"
  for yaml_file in "${input_files[@]}"; do
    if is_excluded "$yaml_file"; then
      echo " - $yaml_file (Excluded)"
    else
      process_file "$yaml_file"
    fi
  done
  echo "Done"
}
# Run the full normalization pipeline over a single spec file and record
# the derived object name in file_object_map for later operationId
# de-duplication. A missing file yields a warning, not a failure.
process_file() {
  local file="$1"
  if [[ ! -f "$file" ]]; then
    echo "Warning: File '$file' does not exist." >&2
    return 0
  fi
  echo " - $file"
  # NB: object_name and base_path are deliberately not declared local —
  # they are plain globals, matching the original script's convention.
  object_name=$(extract_object_name "$file")
  echo " Object Name: $object_name"
  file_object_map["$file"]="$object_name"
  base_path=$(extract_base_path "$file")
  echo " Base Path: $base_path"
  fix_title "$file" "$object_name"
  # Rename duplicate path parameters before any path rewriting.
  rename_duplicate_path_parameters "$file"
  remove_base_path "$file"
  remove_openapi_paths "$file"
  prefix_paths "$file" "$base_path"
  add_default_response_description "$file"
  add_parameter_type_string "$file"
}
# Derive a human-readable object name from a spec filename:
#   "user-account.yaml" -> "User Account".
# FIX: the awk program uses only POSIX features (gsub, NF, toupper, substr),
# so plain `awk` works everywhere — no GNU gawk dependency needed here.
extract_object_name() {
  local stem
  stem=$(basename "$1" .yaml)
  printf '%s\n' "$stem" \
    | awk '{gsub(/-/, " "); for (i = 1; i <= NF; i++) $i = toupper(substr($i, 1, 1)) tolower(substr($i, 2))} 1'
}
# Replace the generic "Object" title with the derived object name.
# FIX: the name is passed to yq through the environment (strenv) instead of
# being spliced into the expression string — quotes or other special
# characters in the name can no longer break the yq program.
fix_title() {
  local file="$1"
  local object_name="$2"
  if [[ "$(yq eval '.info.title' "$file")" == "Object" ]]; then
    OBJECT_NAME="$object_name" \
      yq eval --inplace '.info.title = strenv(OBJECT_NAME)' "$file"
  fi
}
# Read the first server URL from the spec and strip scheme + host and any
# trailing slash, leaving just the base path (e.g. "/o/c/accounts").
# FIX: uses bash regex/parameter expansion instead of spawning two gsed
# processes; behavior matches the previous sed expressions.
extract_base_path() {
  local file="$1"
  local url base_path
  url=$(yq eval '.servers[0].url' "$file")
  if [[ "$url" =~ ^https?://[^/]+ ]]; then
    # Drop the matched "scheme://host" prefix.
    base_path="${url:${#BASH_REMATCH[0]}}"
  else
    base_path="$url"
  fi
  # Strip a single trailing slash, like the old `gsed 's|/$||'`.
  base_path="${base_path%/}"
  echo "$base_path"
}
# Detect path templates that use the same {parameter} name more than once
# (e.g. /a/{id}/b/{id}), rename the second and later occurrences to
# related<Name>, and update the matching parameter entries of each method
# so path template and parameter list stay consistent.
rename_duplicate_path_parameters() {
  local file="$1"
  echo "Checking for duplicate path parameters in $file..."
  local paths
  paths=$(yq eval '.paths | keys | .[]' "$file")
  declare -A param_counts
  for path in $paths; do
    # Reset param_counts for each path
    param_counts=()
    # Extract parameter names from the path. The gsed expression rewrites
    # every "{name}" as "{name}}" so that `read -d '}'` yields exactly one
    # chunk per parameter; the inner gsed then strips everything up to "{".
    local params_in_path=()
    while IFS= read -r -d '}' match; do
      param_name=$(echo "$match" | gsed 's/.*{//')
      if [ -n "$param_name" ]; then
        params_in_path+=("$param_name")
      fi
    done <<<"$(echo "$path" | gsed -n 's/{[^}]*}/&}/g;p')"
    # Count occurrences; any name seen more than once is recorded as a
    # duplicate (duplicates may contain the same name multiple times for
    # 3+ occurrences, which the renaming loop below tolerates).
    local duplicates=()
    for param in "${params_in_path[@]}"; do
      if [ -n "$param" ]; then
        param_counts["$param"]=$(( ${param_counts["$param"]:-0} + 1 ))
        if [ "${param_counts["$param"]}" -gt 1 ]; then
          duplicates+=("$param")
        fi
      fi
    done
    # Proceed only if there are duplicates
    if [ "${#duplicates[@]}" -gt 0 ]; then
      echo " Found duplicate parameters in path: $path"
      local new_path="$path"
      # Maps "origName|occurrenceNumber" -> replacement name; re-declared
      # (and thus reset) for every path that has duplicates.
      declare -A replacement_map=()
      for dup_param in "${duplicates[@]}"; do
        local count=0
        # Walk params_in_path in order; occurrences 2..n get renamed to
        # "related<Name>" (first letter uppercased via tr).
        for idx in "${!params_in_path[@]}"; do
          if [ "${params_in_path[$idx]}" == "$dup_param" ]; then
            count=$((count + 1))
            if [ "$count" -ge 2 ]; then
              # Create new parameter name with 'related' prefix
              local replacement_param_name="related$(echo "${dup_param:0:1}" | tr '[:lower:]' '[:upper:]')${dup_param:1}"
              params_in_path[$idx]="$replacement_param_name"
              replacement_map["$dup_param|$count"]="$replacement_param_name"
            fi
          fi
        done
      done
      # Rebuild the path: gsed wraps every "{...}" segment in "||" markers.
      # NOTE(review): IFS is a *set* of characters, so IFS='||' is
      # effectively IFS='|'; the doubled delimiters merely produce empty
      # fields, which append nothing in the loop below — confirm this is
      # the intended mechanism before changing it.
      IFS='||' read -ra path_parts <<< "$(echo "$path" | gsed -E 's/(\{[^}]*\})/||\1||/g')"
      count=0
      new_path=""
      for part in "${path_parts[@]}"; do
        if [[ "$part" == "{"*"}" ]]; then
          # Substitute the (possibly renamed) parameter for this position.
          param_name="${params_in_path[$count]}"
          new_path="${new_path}{${param_name}}"
          count=$((count +1))
        else
          new_path="${new_path}${part}"
        fi
      done
      echo " Renaming path to: $new_path"
      # Move the path entry to its new key in the YAML.
      yq eval --inplace 'with(.paths; .["'"$new_path"'"] = .["'"$path"'"] | del(.["'"$path"'"]))' "$file"
      # Update parameter names inside each method so they match the
      # renamed template segments (matched by occurrence order).
      local methods
      methods=$(yq eval ".paths[\"$new_path\"] | keys | .[]" "$file")
      for method in $methods; do
        local param_count
        param_count=$(yq eval ".paths[\"$new_path\"].$method.parameters | length" "$file" 2>/dev/null || echo 0)
        if [[ "$param_count" -gt 0 ]]; then
          declare -A occurrence_counts=()
          for ((i=0; i<param_count; i++)); do
            local param_name
            param_name=$(yq eval ".paths[\"$new_path\"].$method.parameters[$i].name" "$file")
            occurrence_counts["$param_name"]=$(( ${occurrence_counts["$param_name"]:-0} + 1 ))
            local occur_num=${occurrence_counts["$param_name"]}
            local replacement_key="$param_name|$occur_num"
            if [[ -n "${replacement_map[$replacement_key]:-}" ]]; then
              new_param_name="${replacement_map[$replacement_key]}"
              # Update the parameter name
              yq eval --inplace ".paths[\"$new_path\"].$method.parameters[$i].name = \"$new_param_name\"" "$file"
              echo " Updated parameter name: $param_name -> $new_param_name"
            fi
          done
        fi
      done
    fi
  done
}
# Normalize the first server URL to the local default host, dropping the
# original host and base path (which prefix_paths re-adds to each key).
remove_base_path() {
  local spec_file="$1"
  yq eval --inplace '.servers[0].url |= sub("https?://[^/]+/[^ ]*", "http://localhost:8080/")' "$spec_file"
}
# Delete self-describing endpoints (any path containing "/openapi.") so
# they don't leak into the joined spec or the generated client.
# FIX: keys are read one per line instead of word-splitting a $(...)
# expansion, so a key containing whitespace no longer breaks iteration.
remove_openapi_paths() {
  local file="$1"
  local path
  while IFS= read -r path; do
    [[ "$path" == */openapi.* ]] || continue
    yq eval --inplace 'del(.paths["'"$path"'"])' "$file"
  done < <(yq eval '.paths | keys | .[]' "$file")
}
# Prepend the spec's original base path to every path key (so endpoints
# from different services stay unique after joining), printing one dot per
# rewritten key as a progress indicator.
prefix_paths() {
  local file="$1"
  local base_path="$2"
  local paths
  paths=$(yq eval '.paths | keys | .[]' "$file")
  # Nothing to do for a spec with no paths.
  if [ -z "$paths" ]; then
    return
  fi
  printf "Path Update: "
  local printed_dot=false
  for path in $paths; do
    # NOTE(review): this branch appears to handle keys yq prints with a
    # leading "? " (complex-key marker). Since $paths is word-split on
    # whitespace above, such a prefix may arrive as a lone "?" token, in
    # which case ${path:2} would be empty — verify against real yq output.
    if [[ "$path" == \?* ]]; then
      new_path="? ${base_path}${path:2}"
    else
      new_path="${base_path}${path}"
    fi
    printf "."
    printed_dot=true
    # Move the entry to the prefixed key and drop the old one.
    yq eval --inplace '.paths["'"$new_path"'"] = .paths["'"$path"'"] | del(.paths["'"$path"'"])' "$file"
  done
  # Terminate the dot progress line only if we printed anything.
  if [ "$printed_dot" = true ]; then
    echo ""
  fi
}
# OpenAPI requires every response object to carry a description; give any
# 'default' response that lacks one the placeholder "default response".
# Prints a dot per method as progress.
add_default_response_description() {
  local file="$1"
  local path method default_node desc_node
  printf "Desc Update: "
  for path in $(yq eval '.paths | keys | .[]' "$file"); do
    for method in $(yq eval ".paths[\"$path\"] | keys | .[]" "$file"); do
      printf "."
      default_node=$(yq eval ".paths[\"$path\"].$method.responses.default" "$file")
      desc_node=$(yq eval ".paths[\"$path\"].$method.responses.default.description" "$file")
      # Only patch when a default response exists but has no description.
      if [[ "$default_node" != "null" && "$desc_node" == "null" ]]; then
        echo ""
        echo "Adding default response description for $path [$method]"
        yq eval --inplace ".paths[\"$path\"].$method.responses.default.description = \"default response\"" "$file"
      fi
    done
  done
  echo ""
}
# Give every operation parameter that has no schema a default schema of
# {type: string}, which some generators require. Prints a dot per method.
add_parameter_type_string() {
  local file="$1"
  local path method idx param_total param_name schema_node
  printf "Parm Update: "
  for path in $(yq eval '.paths | keys | .[]' "$file"); do
    for method in $(yq eval ".paths[\"$path\"] | keys | .[]" "$file"); do
      printf "."
      # A missing parameters array yields 0, silencing the yq error.
      param_total=$(yq eval ".paths[\"$path\"].$method.parameters | length" "$file" 2>/dev/null || echo 0)
      if [[ "$param_total" -gt 0 ]]; then
        for (( idx = 0; idx < param_total; idx++ )); do
          schema_node=$(yq eval ".paths[\"$path\"].$method.parameters[$idx].schema" "$file" 2>/dev/null || echo "null")
          if [[ "$schema_node" == "null" ]]; then
            param_name=$(yq eval ".paths[\"$path\"].$method.parameters[$idx].name" "$file")
            yq eval --inplace '.paths["'"$path"'"].'"$method"'.parameters['"$idx"'].schema = {"type": "string"}' "$file"
            echo ""
            echo "Adding schema type: string to parameter $param_name in $path [$method]"
          fi
        done
      fi
    done
  done
  echo ""
}
# Give every component schema name a single unified definition across all
# input specs, so the join step doesn't produce conflicting duplicates.
# Definitions containing a $ref keep the first file's version verbatim;
# otherwise all versions are deep-merged with yq.
# BUG FIX: the original tested `yq eval 'has("$ref")' >/dev/null` — but yq
# exits 0 whether it prints "true" or "false", so has_ref was always true
# and the merge branch was dead code. We now compare the printed value.
unify_components() {
  local files=("$@")
  local temp_dir
  temp_dir=$(mktemp -d)
  declare -A component_definitions
  local file comp components f def
  # Pass 1: record, per component name, the space-separated list of files
  # that define it.
  for file in "${files[@]}"; do
    if is_excluded "$file"; then
      continue
    fi
    components=$(yq eval '.components.schemas | keys | .[]' "$file" 2>/dev/null || true)
    for comp in $components; do
      component_definitions["$comp"]+="$file "
    done
  done
  # Pass 2: unify each component and write it back to every defining file.
  for comp in "${!component_definitions[@]}"; do
    # Intentionally unquoted: split the recorded file list on spaces.
    local files_with_comp=(${component_definitions["$comp"]})
    echo "Processing component: $comp"
    # Dump each file's version of the component to its own temp file.
    local defs=()
    for f in "${files_with_comp[@]}"; do
      yq eval ".components.schemas.$comp" "$f" > "$temp_dir/$comp-$(basename "$f").yaml"
      defs+=("$temp_dir/$comp-$(basename "$f").yaml")
    done
    # Does any version consist of a $ref? (compare yq's *output*, not its
    # exit status — see BUG FIX note above)
    local has_ref=false
    for def in "${defs[@]}"; do
      if [[ "$(yq eval 'has("$ref")' "$def")" == "true" ]]; then
        has_ref=true
        break
      fi
    done
    local unified_def
    if [ "$has_ref" = true ]; then
      # $ref schemas cannot be merged meaningfully; keep the first one.
      unified_def=$(cat "${defs[0]}")
    else
      # Deep-merge all versions into one document.
      unified_def=$(yq eval-all 'reduce .[] as $item ({}; . * $item )' "${defs[@]}")
    fi
    echo "$unified_def" > "$temp_dir/merged_$comp.yaml"
    # Write the unified definition back into every file that had it.
    for f in "${files_with_comp[@]}"; do
      yq eval --inplace ".components.schemas[\"$comp\"] = load(\"$temp_dir/merged_$comp.yaml\")" "$f"
    done
  done
  rm -rf "$temp_dir"
  echo "Components unified across all YAML files."
}
# Ensure every "#/components/schemas/X" $ref used by a file has a local
# definition in that same file, copying missing schemas from the first
# input file that defines them. Runs repeatedly because freshly-copied
# components can themselves contain new $refs; iteration is capped to
# avoid looping forever on unresolvable references.
resolve_refs() {
  local files=("$@")
  local temp_dir
  temp_dir=$(mktemp -d)
  declare -A component_definitions
  # Pre-index all component definitions (name -> YAML text).
  for file in "${files[@]}"; do
    if is_excluded "$file"; then
      continue
    fi
    local components
    components=$(yq eval '.components.schemas | keys | .[]' "$file" 2>/dev/null || true)
    for comp in $components; do
      # Store the component definition only once (first occurrence wins).
      if [[ -z "${component_definitions[$comp]:-}" ]]; then
        component_definitions["$comp"]=$(yq eval ".components.schemas.$comp" "$file")
      fi
    done
  done
  local added_component=true
  local iteration=0
  local max_iterations=10
  # Fixed-point loop: repeat while the previous sweep added something.
  while [ "$added_component" = true ] && [ "$iteration" -lt "$max_iterations" ]; do
    iteration=$((iteration + 1))
    echo "Resolve_refs Iteration: $iteration"
    added_component=false
    for file in "${files[@]}"; do
      if is_excluded "$file"; then
        continue
      fi
      echo "Resolving \$refs in $file"
      # Extract all $refs that point into components.schemas; grep's
      # failure (no matches) is deliberately tolerated via `|| true`.
      local refs
      refs=$(yq eval '.. | select(has("$ref")) | ."$ref"' "$file" | grep '^#/components/schemas/' || true)
      for ref in $refs; do
        # Reduce "#/components/schemas/Foo" to the bare name "Foo".
        # NOTE(review): plain `sed` here, while the rest of the script
        # uses gsed — works on BSD sed too since -E is supported, but it
        # is inconsistent with the declared tool requirements.
        local comp
        comp=$(echo "$ref" | sed -E 's|#/components/schemas/||')
        # Check if the component is already defined in the current file.
        local has_component
        has_component=$(yq eval ".components.schemas[\"$comp\"]" "$file" 2>/dev/null || echo "null")
        if [[ "$has_component" == "null" ]]; then
          # Check if we have the component definition in our index.
          if [[ -n "${component_definitions[$comp]:-}" ]]; then
            echo "Adding missing component '$comp' to $file"
            # Write the component definition to a temp file so yq's
            # load() can splice it in without quoting issues.
            echo "${component_definitions[$comp]}" > "$temp_dir/$comp.yaml"
            yq eval --inplace ".components.schemas[\"$comp\"] = load(\"$temp_dir/$comp.yaml\")" "$file"
            added_component=true
          else
            echo "Warning: Component '$comp' referenced in $file but not found in any provided files."
          fi
        fi
      done
    done
    if [ "$added_component" = false ]; then
      echo "No new components added in this iteration."
      break
    fi
  done
  if [ "$iteration" -ge "$max_iterations" ]; then
    echo "Reached maximum iterations ($max_iterations) while resolving \$refs."
  fi
  rm -rf "$temp_dir"
  echo "All \$refs resolved."
}
# Find operationIds that occur more than once across all input specs and
# rename every occurrence to "<method><ObjectName><Rest>" so the joined
# spec (and the generated client) has unique operation names.
# Reads:  file_object_map (filled by process_file)
# Writes: operationId_counts, operationId_occurrences (script-level arrays)
# FIX: the case-insensitive prefix strip previously used `sed s///I`,
# whose /I flag is a GNU extension (fails on BSD/macOS sed); since the
# prefix match is already confirmed, a pure-bash substring slice is
# equivalent and portable. `${method,,}` also replaces an awk spawn.
resolve_duplicate_operation_ids() {
  local files=("$@")
  local temp_file
  temp_file=$(mktemp)
  echo "Collecting all operationIds from provided YAML files..."
  # First pass: collect every operationId with its file/method/path.
  for file in "${files[@]}"; do
    if is_excluded "$file"; then
      continue
    fi
    echo " Processing file: $file"
    local paths
    paths=$(yq eval '.paths | keys | .[]' "$file" 2>/dev/null || true)
    for path in $paths; do
      local methods
      methods=$(yq eval ".paths[\"$path\"] | keys | .[]" "$file" 2>/dev/null || true)
      for method in $methods; do
        local operation_id
        operation_id=$(yq eval ".paths[\"$path\"].$method.operationId" "$file" 2>/dev/null || echo "null")
        # Skip operations that have no operationId at all.
        if [[ "$operation_id" != "null" && -n "$operation_id" ]]; then
          # NOTE(review): this CSV audit file is written but never read
          # back; kept for parity with the original behavior.
          echo "$operation_id,$file,$method,$path" >> "$temp_file"
          operationId_counts["$operation_id"]=$(( ${operationId_counts["$operation_id"]:-0} + 1 ))
          operationId_occurrences["$operation_id"]+="$file|$method|$path;"
        fi
      done
    done
  done
  # Identify duplicate operationIds (count > 1).
  echo "Identifying duplicate operationIds..."
  declare -A duplicate_operationIds
  for opId in "${!operationId_counts[@]}"; do
    if [ "${operationId_counts[$opId]}" -gt 1 ]; then
      duplicate_operationIds["$opId"]=1
    fi
  done
  if [ "${#duplicate_operationIds[@]}" -eq 0 ]; then
    echo "No duplicate operationIds found."
    rm "$temp_file"
    return
  fi
  echo "Found duplicate operationIds:"
  for dup in "${!duplicate_operationIds[@]}"; do
    echo " - $dup"
  done
  # Rename every occurrence of each duplicate operationId.
  for dup in "${!duplicate_operationIds[@]}"; do
    echo "Resolving duplicate operationId: $dup"
    IFS=';' read -ra occs <<< "${operationId_occurrences["$dup"]}"
    for occ in "${occs[@]}"; do
      # Skip the empty field produced by the trailing ';'.
      [[ -z "$occ" ]] && continue
      IFS='|' read -r file method path <<< "$occ"
      # Object name with spaces removed, e.g. "User Account" -> "UserAccount".
      local object_name="${file_object_map["$file"]}"
      object_name="${object_name// /}"
      # Lowercase via bash case conversion (no awk subprocess needed).
      local method_lower="${method,,}"
      local new_operation_id
      if [[ "${dup,,}" == "${method_lower}"* ]]; then
        # The match above is case-insensitive, so slicing off the method's
        # length removes the prefix regardless of its original case.
        local rest="${dup:${#method_lower}}"
        new_operation_id="${method_lower}${object_name}${rest}"
      else
        # No method prefix: prepend both method and object name.
        new_operation_id="${method_lower}${object_name}${dup}"
      fi
      echo " - Updating operationId in file: $file, path: $path, method: $method"
      echo " Old operationId: $dup"
      echo " New operationId: $new_operation_id"
      yq eval --inplace ".paths[\"$path\"][\"$method\"].operationId = \"${new_operation_id}\"" "$file"
    done
  done
  rm "$temp_file"
  echo "Duplicate operationIds have been resolved."
}
# Combine all processed specs into one document with the Redocly CLI,
# deleting any stale output file first.
join_yaml_files() {
  local output="$1"
  shift
  local sources=("$@")
  if [[ -f "$output" ]]; then
    echo "Output file '$output' already exists. Deleting it before joining."
    rm "$output"
  fi
  echo "Joining YAML files into '$output'..."
  npx @redocly/cli join -o "$output" "${sources[@]}"
  echo "YAML files have been successfully joined into '$output'."
}
# Run openapi-generator-cli in Docker against the joined spec, mounting
# the working directory at /local. Appends --additional-properties when
# the global additional_properties is non-empty.
generate_client() {
  local yaml_file="$1"
  local generator_type="$2"
  local client_output="$3"
  echo "Generating client using OpenAPI Generator via Docker..."
  echo " Generator type: $generator_type"
  echo " Output directory: $client_output"
  # Start from a clean output directory every run.
  if [[ -d "$client_output" ]]; then
    echo "Output directory '$client_output' already exists. Deleting it before generating the client."
    rm -rf "$client_output"
  fi
  # Build the command as an array so paths/properties with spaces survive.
  local docker_cmd=(
    docker run --rm
    -v "${PWD}:/local"
    openapitools/openapi-generator-cli generate
    -i "/local/$yaml_file"
    -g "$generator_type"
    -o "/local/$client_output"
  )
  if [[ -n "$additional_properties" ]]; then
    echo " Additional properties: $additional_properties"
    docker_cmd+=(--additional-properties="$additional_properties")
  fi
  "${docker_cmd[@]}"
  echo "Client generated successfully in '$client_output'."
}
# Main execution flow: parse arguments, verify required tools, normalize
# each spec in place, unify/resolve shared components and operationIds,
# join everything into one YAML, then generate the client from it.
main() {
  parse_args "$@"
  check_commands
  # Process input files (per-file normalization pipeline).
  process_files "${input_files[@]}"
  unify_components "${input_files[@]}"
  resolve_refs "${input_files[@]}"
  resolve_duplicate_operation_ids "${input_files[@]}"
  # Join YAML files into the single output document.
  join_yaml_files "$output_file" "${input_files[@]}"
  # Generate the client from the joined spec.
  generate_client "$output_file" "$generator_type" "$client_output"
  echo "Processing completed successfully."
}
# Run the main function with all passed arguments
main "$@"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment