spark_applications
Creates, updates, deletes, gets or lists a spark_applications
resource.
Overview
Name | spark_applications |
Type | Resource |
Id | google.dataproc.spark_applications |
Fields
The following fields are returned by SELECT
queries:
SELECT
not supported for this resource, use SHOW METHODS
to view available operations for the resource.
Methods
The following methods are available for this resource:
Parameters
Parameters can be passed in the WHERE
clause of a query. Check the Methods section to see which parameters are required or optional for each operation.
Name | Datatype | Description |
---|---|---|
batchesId | string | |
locationsId | string | |
projectsId | string | |
sessionsId | string | |
sparkApplicationsId | string | |
applicationStatus | string | |
details | boolean | |
executionId | string (int64) | |
executorStatus | string | |
jobId | string (int64) | |
jobIds | string (int64) | |
jobStatus | string | |
maxEndTime | string (google-datetime) | |
maxTime | string (google-datetime) | |
minEndTime | string (google-datetime) | |
minTime | string (google-datetime) | |
operationIds | string | |
pageSize | integer (int32) | |
pageToken | string | |
parent | string | |
planDescription | boolean | |
sortRuntime | boolean | |
stageAttemptId | integer (int32) | |
stageId | string (int64) | |
stageIds | string (int64) | |
stageStatus | string | |
summaryMetricsMask | string (google-fieldmask) | |
taskStatus | string | |
Lifecycle Methods
- projects_locations_batches_spark_applications_write
- projects_locations_batches_spark_applications_search
- projects_locations_batches_spark_applications_access
- projects_locations_batches_spark_applications_search_jobs
- projects_locations_batches_spark_applications_access_job
- projects_locations_batches_spark_applications_search_stages
- projects_locations_batches_spark_applications_search_stage_attempts
- projects_locations_batches_spark_applications_access_stage_attempt
- projects_locations_batches_spark_applications_search_stage_attempt_tasks
- projects_locations_batches_spark_applications_search_executors
- projects_locations_batches_spark_applications_search_executor_stage_summary
- projects_locations_batches_spark_applications_search_sql_queries
- projects_locations_batches_spark_applications_access_sql_query
- projects_locations_batches_spark_applications_access_sql_plan
- projects_locations_batches_spark_applications_access_stage_rdd_graph
- projects_locations_batches_spark_applications_access_environment_info
- projects_locations_batches_spark_applications_summarize_jobs
- projects_locations_batches_spark_applications_summarize_stages
- projects_locations_batches_spark_applications_summarize_stage_attempt_tasks
- projects_locations_batches_spark_applications_summarize_executors
- projects_locations_sessions_spark_applications_write
- projects_locations_sessions_spark_applications_search
- projects_locations_sessions_spark_applications_access
- projects_locations_sessions_spark_applications_search_jobs
- projects_locations_sessions_spark_applications_access_job
- projects_locations_sessions_spark_applications_search_stages
- projects_locations_sessions_spark_applications_search_stage_attempts
- projects_locations_sessions_spark_applications_access_stage_attempt
- projects_locations_sessions_spark_applications_search_stage_attempt_tasks
- projects_locations_sessions_spark_applications_search_executors
- projects_locations_sessions_spark_applications_search_executor_stage_summary
- projects_locations_sessions_spark_applications_search_sql_queries
- projects_locations_sessions_spark_applications_access_sql_query
- projects_locations_sessions_spark_applications_access_sql_plan
- projects_locations_sessions_spark_applications_access_stage_rdd_graph
- projects_locations_sessions_spark_applications_access_environment_info
- projects_locations_sessions_spark_applications_summarize_jobs
- projects_locations_sessions_spark_applications_summarize_stages
- projects_locations_sessions_spark_applications_summarize_stage_attempt_tasks
- projects_locations_sessions_spark_applications_summarize_executors
Write wrapper objects from the dataplane to Spanner
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_write
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required
@@json=
'{
"parent": "{{ parent }}",
"sparkWrapperObjects": "{{ sparkWrapperObjects }}"
}';
Obtain high level information and list of Spark Applications corresponding to a batch
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@applicationStatus='{{ applicationStatus }}',
@minTime='{{ minTime }}',
@maxTime='{{ maxTime }}',
@minEndTime='{{ minEndTime }}',
@maxEndTime='{{ maxEndTime }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain high level information corresponding to a single Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain list of spark jobs corresponding to a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_jobs
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@jobStatus='{{ jobStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to a spark job for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_job
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@jobId='{{ jobId }}';
Obtain data corresponding to stages for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_stages
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageStatus='{{ stageStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@summaryMetricsMask='{{ summaryMetricsMask }}';
Obtain data corresponding to a spark stage attempts for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_stage_attempts
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@summaryMetricsMask='{{ summaryMetricsMask }}';
Obtain data corresponding to a spark stage attempt for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_stage_attempt
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@summaryMetricsMask='{{ summaryMetricsMask }}';
Obtain data corresponding to tasks for a spark stage attempt for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_stage_attempt_tasks
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@sortRuntime={{ sortRuntime }},
@taskStatus='{{ taskStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to executors for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_executors
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executorStatus='{{ executorStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain executor summary with respect to a spark stage attempt.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_executor_stage_summary
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to SQL Queries for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_search_sql_queries
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@details={{ details }},
@planDescription={{ planDescription }},
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to a particular SQL Query for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_sql_query
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executionId='{{ executionId }}',
@details={{ details }},
@planDescription={{ planDescription }};
Obtain Spark Plan Graph for a Spark Application SQL execution. Limits the number of clusters returned as part of the graph to 10000.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_sql_plan
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executionId='{{ executionId }}';
Obtain RDD operation graph for a Spark Application Stage. Limits the number of clusters returned as part of the graph to 10000.
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_stage_rdd_graph
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}';
Obtain environment details for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_access_environment_info
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain summary of Jobs for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_summarize_jobs
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain summary of Stages for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_summarize_stages
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain summary of Tasks for a Spark Application Stage Attempt
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_summarize_stage_attempt_tasks
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}';
Obtain summary of Executor Summary for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_batches_spark_applications_summarize_executors
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@batchesId='{{ batchesId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Write wrapper objects from the dataplane to Spanner
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_write
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required
@@json=
'{
"parent": "{{ parent }}",
"sparkWrapperObjects": "{{ sparkWrapperObjects }}"
}';
Obtain high level information and list of Spark Applications corresponding to a session
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@applicationStatus='{{ applicationStatus }}',
@minTime='{{ minTime }}',
@maxTime='{{ maxTime }}',
@minEndTime='{{ minEndTime }}',
@maxEndTime='{{ maxEndTime }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain high level information corresponding to a single Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain list of spark jobs corresponding to a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_jobs
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@jobStatus='{{ jobStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@jobIds='{{ jobIds }}';
Obtain data corresponding to a spark job for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_job
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@jobId='{{ jobId }}';
Obtain data corresponding to stages for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_stages
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageStatus='{{ stageStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@summaryMetricsMask='{{ summaryMetricsMask }}',
@stageIds='{{ stageIds }}';
Obtain data corresponding to a spark stage attempts for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_stage_attempts
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@summaryMetricsMask='{{ summaryMetricsMask }}';
Obtain data corresponding to a spark stage attempt for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_stage_attempt
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@summaryMetricsMask='{{ summaryMetricsMask }}';
Obtain data corresponding to tasks for a spark stage attempt for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_stage_attempt_tasks
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@sortRuntime={{ sortRuntime }},
@taskStatus='{{ taskStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to executors for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_executors
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executorStatus='{{ executorStatus }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain executor summary with respect to a spark stage attempt.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_executor_stage_summary
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}',
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}';
Obtain data corresponding to SQL Queries for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_search_sql_queries
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@details={{ details }},
@planDescription={{ planDescription }},
@pageSize='{{ pageSize }}',
@pageToken='{{ pageToken }}',
@operationIds='{{ operationIds }}';
Obtain data corresponding to a particular SQL Query for a Spark Application.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_sql_query
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executionId='{{ executionId }}',
@details={{ details }},
@planDescription={{ planDescription }};
Obtain Spark Plan Graph for a Spark Application SQL execution. Limits the number of clusters returned as part of the graph to 10000.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_sql_plan
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@executionId='{{ executionId }}';
Obtain RDD operation graph for a Spark Application Stage. Limits the number of clusters returned as part of the graph to 10000.
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_stage_rdd_graph
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}';
Obtain environment details for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_access_environment_info
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';
Obtain summary of Jobs for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_summarize_jobs
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@jobIds='{{ jobIds }}';
Obtain summary of Stages for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_summarize_stages
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageIds='{{ stageIds }}';
Obtain summary of Tasks for a Spark Application Stage Attempt
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_summarize_stage_attempt_tasks
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}',
@stageId='{{ stageId }}',
@stageAttemptId='{{ stageAttemptId }}';
Obtain summary of Executor Summary for a Spark Application
EXEC google.dataproc.spark_applications.projects_locations_sessions_spark_applications_summarize_executors
@projectsId='{{ projectsId }}' --required,
@locationsId='{{ locationsId }}' --required,
@sessionsId='{{ sessionsId }}' --required,
@sparkApplicationsId='{{ sparkApplicationsId }}' --required,
@parent='{{ parent }}';