#!python
#
# Copyright (c) 2012-2025 Snowflake Computing Inc. All rights reserved.
#

"""
CLI tool for running Java/Scala Spark workloads against the SCOS server.

This tool:
  1. Adds the customer's JAR (and any dependency JARs) to the classpath
  2. Starts the SCOS server (which starts the JVM + gRPC server)
  3. Sets SPARK_REMOTE env var so SparkSession.builder().getOrCreate() connects
     automatically (zero customer code changes needed)
  4. Executes the specified main class via JPype
  5. Blocks until the Java application completes
  6. Shuts down the server and JVM

Examples:

  # Basic: run a single JAR
  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp

  # Custom port (useful when default 15002 is occupied)
  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp --port 15010

  # Thin JAR with external dependency JARs passed via --jars
  #   Build your app as a thin JAR (e.g. sbt package / mvn package), then
  #   supply Maven/Coursier-cached dependency JARs individually:
  snowpark-connect-execute-jar \\
      --jar-file target/scala-2.12/my-app_2.12-1.0.0.jar \\
      --main-class com.example.MyApp \\
      --jars /path/to/gson-2.10.1.jar,/path/to/commons-math3-3.6.1.jar,/path/to/guava-32.1.3-jre.jar

  # Uber JAR (all dependencies bundled, no --jars needed)
  #   Build with sbt-assembly or maven-shade-plugin, then run directly:
  snowpark-connect-execute-jar \\
      --jar-file target/scala-2.12/my-app-assembly-1.0.0.jar \\
      --main-class com.example.MyApp

  # JVM tuning (heap size, GC, etc.)
  snowpark-connect-execute-jar \\
      --jar-file app.jar \\
      --main-class com.example.MyApp \\
      --jvm-options="-Xmx4g -Xms1g"

  # Pass arguments to the Java main method (use -- to separate)
  snowpark-connect-execute-jar \\
      --jar-file app.jar \\
      --main-class com.example.MyApp \\
      -- --input-table MY_TABLE --output-table RESULTS
"""

import argparse
import logging
import sys


def main():
    """CLI entry point: parse arguments and run a JAR against the SCOS server.

    Exit codes:
        0 -- the Java application completed successfully.
        1 -- the job (or server startup / package import) failed.
        2 -- invalid command-line usage (raised by argparse).
    """
    parser = argparse.ArgumentParser(
        description="Run a Java/Scala Spark application against the SCOS server.",
        epilog=(
            "Examples:\n\n"
            "  # Basic: run a single JAR\n"
            "  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp\n\n"
            "  # Custom port\n"
            "  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp --port 15010\n\n"
            "  # Thin JAR + dependency JARs via --jars\n"
            "  snowpark-connect-execute-jar \\\n"
            "      --jar-file target/scala-2.12/my-app_2.12-1.0.0.jar \\\n"
            "      --main-class com.example.MyApp \\\n"
            "      --jars gson-2.10.1.jar,commons-math3-3.6.1.jar,guava-32.1.3-jre.jar\n\n"
            "  # Uber JAR (all dependencies bundled, no --jars needed)\n"
            "  snowpark-connect-execute-jar \\\n"
            "      --jar-file target/scala-2.12/my-app-assembly-1.0.0.jar \\\n"
            "      --main-class com.example.MyApp\n\n"
            "  # JVM tuning\n"
            "  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp \\\n"
            '      --jvm-options="-Xmx4g -Xms1g"\n\n'
            "  # Pass arguments to Java main method (after --)\n"
            "  snowpark-connect-execute-jar --jar-file app.jar --main-class com.example.MyApp \\\n"
            "      -- --input-table MY_TABLE --output-table RESULTS\n"
        ),
        # Raw formatter preserves the hand-wrapped example block verbatim.
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "--jar-file",
        required=True,
        help="Path to the JAR file containing the main class.",
    )
    parser.add_argument(
        "--main-class",
        required=True,
        help="Fully qualified Java/Scala main class (e.g. com.example.MyApp).",
    )
    parser.add_argument(
        "--jars",
        default=None,
        help="Comma-separated dependency JARs or glob patterns added to classpath.",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=15002,
        help="TCP port for the gRPC server (default: 15002).",
    )
    parser.add_argument(
        "--jvm-options",
        default=None,
        help='JVM options (e.g. "--jvm-options=-Xmx4g -Xms1g").',
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Enable verbose (DEBUG) logging.",
    )
    # Positionals after the "--" separator are forwarded to the Java main().
    parser.add_argument(
        "jar_args",
        nargs="*",
        help="Additional arguments passed to the Java main method (after --).",
    )

    args = parser.parse_args()

    log_level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s %(levelname)s [%(name)s] %(message)s",
    )

    # Tolerate stray whitespace and empty entries (e.g. a trailing comma or
    # "a, b" spacing) in the --jars list so they don't pollute the classpath.
    additional_jars = None
    if args.jars:
        additional_jars = [j.strip() for j in args.jars.split(",") if j.strip()] or None

    # Whitespace-separated JVM flags, e.g. "-Xmx4g -Xms1g" -> two options.
    jvm_options = args.jvm_options.split() if args.jvm_options else None

    try:
        # Imported lazily so that --help and argument errors stay fast and do
        # not require the heavy server package; kept inside the try so an
        # import failure is reported through the same clean error path (exit
        # code 1) instead of an unhandled traceback.
        from snowflake.snowpark_connect.server import execute_jar

        execute_jar(
            jar_path=args.jar_file,
            main_class=args.main_class,
            jar_args=args.jar_args if args.jar_args else None,
            additional_jars=additional_jars,
            tcp_port=args.port,
            jvm_options=jvm_options,
        )
    except Exception as e:
        # Lazy %-style args per logging best practice; attach the traceback
        # only under --verbose to keep normal output uncluttered.
        logging.error("Job failed: %s", e, exc_info=args.verbose)
        sys.exit(1)


# Script guard: run only when executed directly (the module docstring shows
# this tool being invoked as the `snowpark-connect-execute-jar` command).
if __name__ == "__main__":
    main()
