This commit is contained in:
Jacob Zhou 2021-05-25 21:34:47 -07:00
Родитель 55f1c9deee
Коммит 97b6f9faac
189 изменённых файлов: 4043 добавлений и 8 удалений

27
.devcontainer/Dockerfile Normal file
Просмотреть файл

@ -0,0 +1,27 @@
# Dev-container image for the Databricks SDK for Go.
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/go/.devcontainer/base.Dockerfile
# [Choice] Go version: 1, 1.16, 1.15
ARG VARIANT="1.16"
FROM mcr.microsoft.com/vscode/devcontainers/go:0-${VARIANT}
# [Option] Install Node.js
ARG INSTALL_NODE="true"
ARG NODE_VERSION="lts/*"
# Install Node via nvm as the non-root "vscode" user; umask 0002 keeps the shared tool dirs group-writable.
RUN if [ "${INSTALL_NODE}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# Install Go editor/CI helper tools.
# NOTE(review): since Go 1.16, `go get` for installing binaries is deprecated in favor of
# `go install <pkg>@<version>`, and nsf/gocode is archived — consider migrating these lines.
RUN \
# --> Install gocode
go get -u github.com/nsf/gocode \
# --> Install go-symbols
&& go get github.com/acroca/go-symbols \
# --> Install impl
&& go get github.com/sasha-s/goimpl/cmd/goimpl \
# --> Install junit converter
&& go get github.com/jstemmer/go-junit-report
# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1

Просмотреть файл

@ -0,0 +1,67 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.177.0/containers/go
{
	"name": "Databricks SDK Go",
	"build": {
		"dockerfile": "Dockerfile",
		"args": {
			// Update the VARIANT arg to pick a version of Go: 1, 1.16, 1.15
			"VARIANT": "1.16",
			// Options
			"INSTALL_NODE": "true",
			"NODE_VERSION": "lts/*"
		}
	},
	// SYS_PTRACE + unconfined seccomp are needed by the Go debugger (dlv) inside the container.
	"runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ],
	// Set *default* container specific settings.json values on container create.
	"settings": {
		"terminal.integrated.shell.linux": "zsh",
		"go.gopath": "/go",
		"go.useLanguageServer": true,
		"go.toolsManagement.checkForUpdates": "local",
		"go.goroot": "/usr/local/go",
		"[go]": {
			"editor.formatOnSave": true,
			"editor.codeActionsOnSave": {
				"source.organizeImports": true,
			},
			// Optional: Disable snippets, as they conflict with completion ranking.
			"editor.snippetSuggestions": "none",
		},
		"[go.mod]": {
			"editor.formatOnSave": true,
			"editor.codeActionsOnSave": {
				"source.organizeImports": true,
			},
		},
		"gopls": {
			// Add parameter placeholders when completing a function.
			"usePlaceholders": true,
			// If true, enable additional analyses with staticcheck.
			// Warning: This will significantly increase memory usage.
			"staticcheck": false,
		}
	},
	// Add the IDs of extensions you want installed when the container is created.
	// NOTE: the deprecated "ms-vscode.go" entry was removed — that extension was
	// renamed to "golang.Go", and installing both produces conflicting Go tooling.
	"extensions": [
		"golang.Go",
		"humao.rest-client",
		"IBM.output-colorizer",
		"davidanson.vscode-markdownlint",
		"eamodio.gitlens",
		"adamhartford.vscode-base64"
	],
	// Use 'forwardPorts' to make a list of ports inside the container available locally.
	// "forwardPorts": [],
	// Use 'postCreateCommand' to run commands after the container is created.
	"postCreateCommand": "go mod download",
	// Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
	// "remoteUser": "vscode"
}

3
.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1,3 @@
# Local environment variables (may contain DATABRICKS_HOST / DATABRICKS_TOKEN secrets).
.env
# Local scratch directory.
.local
# Per-developer VS Code debug config; copy from .vscode/launch.json.template instead.
launch.json

23
.vscode/launch.json.template поставляемый Normal file
Просмотреть файл

@ -0,0 +1,23 @@
{
	// Use IntelliSense to learn about possible attributes.
	// Hover to view descriptions of existing attributes.
	// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
	"version": "0.2.0",
	"configurations": [
		{
			"name": "Launch test function",
			"type": "go",
			"request": "launch",
			"mode": "test",
			"program": "${workspaceFolder}/tests/acceptance/azure",
			"args": [
				"-test.run",
				// -test.run takes a Go regular expression. The previous placeholder
				// "TestAzure****" was an invalid pattern (repeated quantifier) and made
				// `go test` fail to parse it. Narrow ".*" to a specific test name as needed.
				"TestAzure.*"
			],
			"env": {
				"DATABRICKS_HOST": "https://xxxx.azuredatabricks.net",
				"DATABRICKS_TOKEN": "xxx"
			}
		}
	]
}

11
Makefile Normal file
Просмотреть файл

@ -0,0 +1,11 @@
# Default target: build, lint, then run tests.
all: checks test

# Build every package and run the linter.
# NOTE: `go build ./...` builds the module's packages; the previous `go build all`
# also pulled in every dependency package, which is slower and not the intent.
checks:
	go build ./...
	golangci-lint run

# Run unit tests (after checks pass).
test: checks
	go test ./...

# gofmt (simplify mode) every Go source outside vendor/.
fmt:
	find . -name '*.go' | grep -v vendor | xargs gofmt -s -w

# These targets are commands, not files; declare them phony so a file named
# "test" or "fmt" in the repo root cannot shadow them.
.PHONY: all checks test fmt

Просмотреть файл

@ -1,14 +1,44 @@
# Project
# databricks-sdk-golang
> This repo has been populated by an initial template to help get you started. Please
> make sure to update the content to build a great experience for community-building.
This is a Go SDK for [Databricks REST API 2.0](https://docs.databricks.com/api/latest/index.html#) and [Azure Databricks REST API 2.0](https://docs.azuredatabricks.net/api/latest/index.html).
As the maintainer of this project, please make a few updates:
## Usage
- Improving this README.MD file to provide a great experience
- Updating SUPPORT.MD with content about this project's support experience
- Understanding the security reporting process in SECURITY.MD
- Remove this section from the README
```go
import (
databricks "github.com/polar-rams/databricks-sdk-golang"
dbAzure "github.com/polar-rams/databricks-sdk-golang/azure"
)
opt := databricks.NewDBClientOption("", "", os.Getenv("DATABRICKS_HOST"), os.Getenv("DATABRICKS_TOKEN"))
c := dbAzure.NewDBClient(opt)
jobs, err := c.Jobs().List()
```
## Implementation Progress
| API | Status |
| :--- | :---: |
| Account API | N/A |
| Clusters API | ✔ |
| Cluster Policies API | ✗ |
| DBFS API | ✔ |
| Global Init Scripts | ✗ |
| Groups API | ✔ |
| Instance Pools API | ✔ |
| IP Access List API | ✗ |
| Jobs API | ✔ |
| Libraries API | ✔ |
| MLflow** API | ✗ |
| Permissions API | ✗ |
| SCIM** API | ✗ |
| Secrets API | ✔ |
| Token API | ✔ |
| Token Management API | ✗ |
| Workspace API | ✔ |
\*\* SCIM and MLflow are separate systems and are planned independently of the APIs above.
## Contributing

72
azure/client.go Normal file
Просмотреть файл

@ -0,0 +1,72 @@
package azure
import databricks "github.com/polar-rams/databricks-sdk-golang"
// DBClient is the Azure Databricks API client; it holds the connection
// options (host, token, etc.) shared by every API group it exposes.
type DBClient struct {
Option databricks.DBClientOption
}
// NewDBClient creates a DBClient from the given connection options.
// The options are copied, so later mutation of *option does not affect the client.
func NewDBClient(option *databricks.DBClientOption) *DBClient {
return &DBClient{
Option: *option,
}
}
// Clusters returns an instance of ClustersAPI bound to this client.
func (c DBClient) Clusters() ClustersAPI {
	return ClustersAPI{}.init(c)
}

// Dbfs returns an instance of DbfsAPI bound to this client.
func (c DBClient) Dbfs() DbfsAPI {
	return DbfsAPI{}.init(c)
}

// Groups returns an instance of GroupsAPI bound to this client.
func (c DBClient) Groups() GroupsAPI {
	return GroupsAPI{}.init(c)
}

// InstancePools returns an instance of InstancePoolsAPI bound to this client.
func (c DBClient) InstancePools() InstancePoolsAPI {
	return InstancePoolsAPI{}.init(c)
}

// Jobs returns an instance of JobsAPI bound to this client.
func (c DBClient) Jobs() JobsAPI {
	return JobsAPI{}.init(c)
}

// Libraries returns an instance of LibrariesAPI bound to this client.
func (c DBClient) Libraries() LibrariesAPI {
	return LibrariesAPI{}.init(c)
}

// Secrets returns an instance of SecretsAPI bound to this client.
func (c DBClient) Secrets() SecretsAPI {
	return SecretsAPI{}.init(c)
}

// Token returns an instance of TokenAPI bound to this client.
func (c DBClient) Token() TokenAPI {
	return TokenAPI{}.init(c)
}

// Workspace returns an instance of WorkspaceAPI bound to this client.
func (c DBClient) Workspace() WorkspaceAPI {
	return WorkspaceAPI{}.init(c)
}
// performQuery forwards an HTTP request to the shared databricks.PerformQuery
// helper, supplying this client's connection options. It returns the raw
// response body so each API wrapper can unmarshal into its own model.
func (c *DBClient) performQuery(method, path string, data interface{}, headers map[string]string) ([]byte, error) {
return databricks.PerformQuery(c.Option, method, path, data, headers)
}

151
azure/clusters.go Normal file
Просмотреть файл

@ -0,0 +1,151 @@
package azure
import (
"encoding/json"
"net/http"
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/httpmodels"
)
// ClustersAPI exposes the Clusters API of Azure Databricks.
type ClustersAPI struct {
Client DBClient
}
// init binds the API struct to a client and returns the bound copy
// (value receiver, so the zero value the caller holds is not mutated).
func (a ClustersAPI) init(client DBClient) ClustersAPI {
a.Client = client
return a
}
// Create creates a new Spark cluster and returns the new cluster's ID on success.
func (a ClustersAPI) Create(req httpmodels.CreateReq) (httpmodels.CreateResp, error) {
	resp := httpmodels.CreateResp{}
	raw, err := a.Client.performQuery(http.MethodPost, "/clusters/create", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}
// Edit edits the configuration of a cluster to match the provided attributes and size.
func (a ClustersAPI) Edit(req httpmodels.EditReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", req, nil); err != nil {
		return err
	}
	return nil
}

// Start starts a terminated Spark cluster given its ID.
func (a ClustersAPI) Start(req httpmodels.StartReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/start", req, nil); err != nil {
		return err
	}
	return nil
}

// Restart restarts a Spark cluster given its ID. If the cluster is not in a
// RUNNING state, nothing will happen.
func (a ClustersAPI) Restart(req httpmodels.RestartReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", req, nil); err != nil {
		return err
	}
	return nil
}

// Resize resizes a cluster to the desired number of workers. This will fail
// unless the cluster is in a RUNNING state.
func (a ClustersAPI) Resize(req httpmodels.ResizeReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/resize", req, nil); err != nil {
		return err
	}
	return nil
}

// Delete terminates a Spark cluster given its ID.
func (a ClustersAPI) Delete(req httpmodels.DeleteReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/delete", req, nil); err != nil {
		return err
	}
	return nil
}

// Terminate is an alias of Delete.
func (a ClustersAPI) Terminate(req httpmodels.DeleteReq) error {
	return a.Delete(req)
}

// PermanentDelete permanently deletes a cluster.
func (a ClustersAPI) PermanentDelete(req httpmodels.PermanentDeleteReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/permanent-delete", req, nil); err != nil {
		return err
	}
	return nil
}
// Get retrieves the information for a cluster given its identifier.
func (a ClustersAPI) Get(req httpmodels.GetReq) (httpmodels.GetResp, error) {
	resp := httpmodels.GetResp{}
	raw, err := a.Client.performQuery(http.MethodGet, "/clusters/get", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}
// Pin ensures that an interactive cluster configuration is retained even after
// the cluster has been terminated for more than 30 days.
func (a ClustersAPI) Pin(req httpmodels.PinReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/pin", req, nil); err != nil {
		return err
	}
	return nil
}

// Unpin allows the cluster to eventually be removed from the list returned by
// the List API.
func (a ClustersAPI) Unpin(req httpmodels.UnpinReq) error {
	if _, err := a.Client.performQuery(http.MethodPost, "/clusters/unpin", req, nil); err != nil {
		return err
	}
	return nil
}
// List returns information about all pinned clusters, currently active clusters,
// up to 70 of the most recently terminated interactive clusters in the past 30
// days, and up to 30 of the most recently terminated job clusters in the past 30 days.
func (a ClustersAPI) List() (httpmodels.ListResp, error) {
	resp := httpmodels.ListResp{}
	raw, err := a.Client.performQuery(http.MethodGet, "/clusters/list", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}

// ListNodeTypes returns a list of supported Spark node types.
func (a ClustersAPI) ListNodeTypes() (httpmodels.ListNodeTypesResp, error) {
	resp := httpmodels.ListNodeTypesResp{}
	raw, err := a.Client.performQuery(http.MethodGet, "/clusters/list-node-types", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}

// RuntimeVersions returns the list of available Runtime versions.
func (a ClustersAPI) RuntimeVersions() (httpmodels.RuntimeVersionsResp, error) {
	resp := httpmodels.RuntimeVersionsResp{}
	raw, err := a.Client.performQuery(http.MethodGet, "/clusters/spark-versions", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}

// Events retrieves a paginated list of events about the activity of a cluster.
func (a ClustersAPI) Events(req httpmodels.EventsReq) (httpmodels.EventsResp, error) {
	resp := httpmodels.EventsResp{}
	raw, err := a.Client.performQuery(http.MethodPost, "/clusters/events", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(raw, &resp)
	return resp, err
}

Просмотреть файл

@ -0,0 +1,29 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// CreateReq is the request payload for ClustersAPI.Create (POST /clusters/create).
type CreateReq struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
Autoscale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
// NOTE(review): custom_tags is a slice here but a single models.ClusterTag in GetResp — confirm the wire format.
CustomTags []models.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
DockerImage *models.DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty" url:"idempotency_token,omitempty"`
ApplyPolicyDefVal bool `json:"apply_policy_default_values,omitempty" url:"apply_policy_default_values,omitempty"`
EnableLocalDiskEncr bool `json:"enable_local_disk_encryption,omitempty" url:"enable_local_disk_encryption,omitempty"`
}
// CreateResp is the response payload from ClustersAPI.Create; it carries the new cluster's ID.
type CreateResp struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// DeleteReq is the request payload for ClustersAPI.Delete (and Terminate).
type DeleteReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,24 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// EditReq is the request payload for ClustersAPI.Edit (POST /clusters/edit).
type EditReq struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
AutoScale models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
// NOTE(review): CreateReq models spark_conf as map[string]string; here it is a single pair — confirm which matches the API.
SparkConf models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
DockerImage models.DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
ClusterLogConf models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
ApplyPolicyDefVal bool `json:"apply_policy_default_values,omitempty" url:"apply_policy_default_values,omitempty"`
EnableLocalDiskEncr bool `json:"enable_local_disk_encryption,omitempty" url:"enable_local_disk_encryption,omitempty"`
}

Просмотреть файл

@ -0,0 +1,21 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// EventsReq is the request payload for ClustersAPI.Events; it selects a
// cluster, a time window, event types, and pagination (offset/limit).
type EventsReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"`
Order models.ListOrder `json:"order,omitempty" url:"order,omitempty"`
EventTypes []models.ClusterEventType `json:"event_types,omitempty" url:"event_types,omitempty"`
Offset int64 `json:"offset,omitempty" url:"offset,omitempty"`
Limit int64 `json:"limit,omitempty" url:"limit,omitempty"`
}
// EventsResp is the response payload from ClustersAPI.Events. NextPage, when
// present, can be passed back to Events to fetch the following page.
type EventsResp struct {
Events *[]models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"`
NextPage EventsReq `json:"next_page,omitempty" url:"next_page,omitempty"`
TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"`
}

Просмотреть файл

@ -0,0 +1,44 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// GetReq is the request payload for ClustersAPI.Get.
type GetReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}
// GetResp is the response payload from ClustersAPI.Get: the full description
// of one cluster, including sizing, configuration, runtime state, and timestamps.
type GetResp struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
AutoScale models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
Driver models.SparkNode `json:"driver,omitempty" url:"driver,omitempty"`
Executors []models.SparkNode `json:"executors,omitempty" url:"executors,omitempty"`
SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"`
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
// NOTE(review): CreateReq models custom_tags as []models.ClusterTag — confirm which shape the API returns.
CustomTags models.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
ClusterLogConf models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
DockerImage models.DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
State models.ClusterState `json:"state,omitempty" url:"state,omitempty"`
StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"`
LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"`
LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"`
ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"`
ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"`
DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"`
ClusterLogStatus models.LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"`
TerminationReason models.TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
// ListResp is the response payload from ClustersAPI.List.
type ListResp struct {
	// Clusters lists pinned, active, and recently terminated clusters.
	// json/url tags added for consistency with every other model in this
	// package; unmarshalling previously relied only on Go's case-insensitive
	// field matching.
	Clusters []models.ClusterInfo `json:"clusters,omitempty" url:"clusters,omitempty"`
}

Просмотреть файл

@ -0,0 +1,9 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// ListNodeTypesResp is the response payload from ClustersAPI.ListNodeTypes.
type ListNodeTypesResp struct {
NodeTypes []models.NodeType `json:"node_types,omitempty" url:"node_types,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// PermanentDeleteReq is the request payload for ClustersAPI.PermanentDelete.
type PermanentDeleteReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// PinReq is the request payload for ClustersAPI.Pin.
type PinReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,11 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// ResizeReq is the request payload for ClustersAPI.Resize; exactly one of
// NumWorkers or Autoscale is expected to size the cluster.
type ResizeReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
Autoscale models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// RestartReq is the request payload for ClustersAPI.Restart.
type RestartReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
// RuntimeVersionsResp is the response payload from ClustersAPI.RuntimeVersions.
type RuntimeVersionsResp struct {
Versions []models.SparkVersion `json:"versions,omitempty" url:"versions,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// StartReq is the request payload for ClustersAPI.Start.
type StartReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// UnpinReq is the request payload for ClustersAPI.Unpin.
type UnpinReq struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// AutoScale defines the lower and upper worker-count bounds for an autoscaling cluster.
type AutoScale struct {
MinWorkers int32 `json:"min_workers,omitempty" url:"min_workers,omitempty"`
MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package models
// AzureAttributes holds Azure-specific node acquisition settings for a cluster.
type AzureAttributes struct {
FirstOnDemand int32 `json:"first_on_demand,omitempty" url:"first_on_demand,omitempty"`
Availability AzureAvailability `json:"availability,omitempty" url:"availability,omitempty"`
SpotBidMaxPrice float64 `json:"spot_bid_max_price,omitempty" url:"spot_bid_max_price,omitempty"`
}

Просмотреть файл

@ -0,0 +1,9 @@
package models
// AzureAvailability is the availability type used for cluster nodes on Azure.
type AzureAvailability string

// Availability values accepted by the API. The constants are typed as
// AzureAvailability (they were previously untyped strings, leaving the
// declared type unused); convert with string(...) where a plain string is needed.
const (
	AzureAvailabilitySpotAzure AzureAvailability = "SPOT_AZURE"
	AzureAvailabilityOnDemandAzure AzureAvailability = "ON_DEMAND_AZURE"
	AzureAvailabilitySpotWithFallbackAzure AzureAvailability = "SPOT_WITH_FALLBACK_AZURE"
)

Просмотреть файл

@ -0,0 +1,21 @@
package models
// ClusterAttributes describes a cluster's configuration (everything except its
// size); used in event details to report previous/current attributes.
type ClusterAttributes struct {
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"`
CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
ClusterLogConf ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
DockerImage DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
ClusterSource ClusterSource `json:"cluster_source,omitempty" url:"cluster_source,omitempty"`
PolicyID string `json:"policy_id,omitempty" url:"policy_id,omitempty"`
AzureAttributes AzureAttributes `json:"azure_attributes,omitempty" url:"azure_attributes,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package models
// ClusterCloudProviderNodeInfo reports cloud-provider availability and core
// quota information for a node type.
type ClusterCloudProviderNodeInfo struct {
Status ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"`
AvailableCoreQuota int32 `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"`
TotalCoreQuota int32 `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package models
// ClusterCloudProviderNodeStatus describes why a node type may be unavailable
// from the cloud provider.
type ClusterCloudProviderNodeStatus string

// Known status values. Typed as ClusterCloudProviderNodeStatus (previously
// untyped strings, leaving the declared type unused).
const (
	ClusterCloudProviderNodeStatusNotEnabledOnSubscription ClusterCloudProviderNodeStatus = "NotEnabledOnSubscription"
	ClusterCloudProviderNodeStatusNotAvailableInRegion ClusterCloudProviderNodeStatus = "NotAvailableInRegion"
)

Просмотреть файл

@ -0,0 +1,8 @@
package models
// ClusterEvent is one entry in a cluster's activity log, as returned by the Events API.
type ClusterEvent struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"`
Type ClusterEventType `json:"type,omitempty" url:"type,omitempty"`
Details *EventDetails `json:"details,omitempty" url:"details,omitempty"`
}

Просмотреть файл

@ -0,0 +1,27 @@
package models
// ClusterEventType identifies the kind of a ClusterEvent.
type ClusterEventType string

// Known event types. Typed as ClusterEventType (previously untyped strings,
// leaving the declared type unused).
const (
	ClusterEventTypeCreating ClusterEventType = "CREATING"
	ClusterEventTypeDidNotExpandDisk ClusterEventType = "DID_NOT_EXPAND_DISK"
	ClusterEventTypeExpandedDisk ClusterEventType = "EXPANDED_DISK"
	ClusterEventTypeFailedToExpandDisk ClusterEventType = "FAILED_TO_EXPAND_DISK"
	ClusterEventTypeInitScriptStarting ClusterEventType = "INIT_SCRIPTS_STARTING"
	ClusterEventTypeInitScriptFinished ClusterEventType = "INIT_SCRIPTS_FINISHED"
	ClusterEventTypeStarting ClusterEventType = "STARTING"
	ClusterEventTypeRestarting ClusterEventType = "RESTARTING"
	ClusterEventTypeTerminating ClusterEventType = "TERMINATING"
	ClusterEventTypeEdited ClusterEventType = "EDITED"
	ClusterEventTypeRunning ClusterEventType = "RUNNING"
	ClusterEventTypeResizing ClusterEventType = "RESIZING"
	ClusterEventTypeUpsizeCompleted ClusterEventType = "UPSIZE_COMPLETED"
	ClusterEventTypeNodesLost ClusterEventType = "NODES_LOST"
	ClusterEventTypeDriverHealthy ClusterEventType = "DRIVER_HEALTHY"
	ClusterEventTypeDriverUnavailable ClusterEventType = "DRIVER_UNAVAILABLE"
	ClusterEventTypeSparkException ClusterEventType = "SPARK_EXCEPTION"
	ClusterEventTypeDriverNotResponding ClusterEventType = "DRIVER_NOT_RESPONDING"
	ClusterEventTypeDbfsDown ClusterEventType = "DBFS_DOWN"
	ClusterEventTypeMetastoreDown ClusterEventType = "METASTORE_DOWN"
	ClusterEventTypeAutoscalingStatsReport ClusterEventType = "AUTOSCALING_STATS_REPORT"
)

Просмотреть файл

@ -0,0 +1,32 @@
package models
// ClusterInfo describes one cluster in a List response: its size,
// configuration, runtime state, and lifecycle timestamps.
type ClusterInfo struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
Driver SparkNode `json:"driver,omitempty" url:"driver,omitempty"`
// NOTE(review): pointer-to-slice is unidiomatic Go; a plain slice already has a usable nil zero value.
Executors *[]SparkNode `json:"executors,omitempty" url:"executors,omitempty"`
SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"`
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
ClusterLogConf ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts *[]InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
State ClusterState `json:"state,omitempty" url:"state,omitempty"`
StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"`
LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"`
LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"`
ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"`
ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"`
DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"`
ClusterLogStatus LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"`
TerminationReason TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package models
// ClusterLogConf specifies where cluster logs are delivered (DBFS destination).
type ClusterLogConf struct {
Dbfs DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// ClusterSize captures a cluster's size as either a fixed worker count or an autoscale range.
type ClusterSize struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
Autoscale AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
}

Просмотреть файл

@ -0,0 +1,9 @@
package models
// ClusterSource identifies what created a cluster (UI, job scheduler, or API).
type ClusterSource string

// Known cluster sources. Typed as ClusterSource (previously untyped strings,
// leaving the declared type unused).
const (
	ClusterSourceUI ClusterSource = "UI"
	ClusterSourceJob ClusterSource = "JOB"
	ClusterSourceAPI ClusterSource = "API"
)

Просмотреть файл

@ -0,0 +1,14 @@
package models
// ClusterState represents the lifecycle state of a cluster as reported by the
// Clusters API.
type ClusterState string

// Known cluster states. Typed as ClusterState (previously untyped strings,
// leaving the declared type unused).
const (
	ClusterStatePending ClusterState = "PENDING"
	ClusterStateRunning ClusterState = "RUNNING"
	ClusterStateRestarting ClusterState = "RESTARTING"
	ClusterStateResizing ClusterState = "RESIZING"
	ClusterStateTerminating ClusterState = "TERMINATING"
	ClusterStateError ClusterState = "ERROR"
	ClusterStateUnknown ClusterState = "UNKNOWN"
	// ClusterStateTerminated follows the naming convention of the other constants.
	ClusterStateTerminated ClusterState = "TERMINATED"
	// ClusterStateTERMINATED is kept for backward compatibility.
	// Deprecated: use ClusterStateTerminated instead.
	ClusterStateTERMINATED = ClusterStateTerminated
)

Просмотреть файл

@ -0,0 +1,6 @@
package models
// ClusterTag is a single key/value tag attached to a cluster.
type ClusterTag struct {
Key string `json:"key,omitempty" url:"key,omitempty"`
Value string `json:"value,omitempty" url:"value,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package models
// DbfsStorageInfo is a DBFS destination path (e.g. for log delivery or init scripts).
type DbfsStorageInfo struct {
Destination string `json:"destination,omitempty" url:"destination,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// DockerBasicAuth holds basic-auth credentials for pulling a Docker image.
type DockerBasicAuth struct {
Username string `json:"username,omitempty" url:"username,omitempty"`
Password string `json:"password,omitempty" url:"password,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// DockerImage identifies a custom container image for a cluster.
// NOTE(review): the Url field name breaks Go initialism convention (URL), but
// renaming an exported field would be a breaking change; the JSON tag is correct.
type DockerImage struct {
Url string `json:"url,omitempty" url:"url,omitempty"`
BasicAuth DockerBasicAuth `json:"basic_auth,omitempty" url:"basic_auth,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package models
// EventDetails carries the payload of a ClusterEvent: worker counts and the
// attribute/size snapshots before and after the event.
type EventDetails struct {
CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"`
TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"`
PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"`
Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"`
PreviousClusterSize ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"`
ClusterSize ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package models
// FileStorageInfo is a file-system destination path (e.g. for init scripts).
type FileStorageInfo struct {
Destination string `json:"destination,omitempty" url:"destination,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// InitScriptInfo locates a cluster init script on either DBFS or a file destination.
type InitScriptInfo struct {
Dbfs DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"`
Fs FileStorageInfo `json:"fs,omitempty" url:"fs,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package models
// ListOrder is a sort direction accepted by listing endpoints (e.g. cluster events).
type ListOrder string

// Sort directions. Typed as ListOrder (previously untyped strings, leaving the
// declared type unused).
const (
	ListOrderDesc ListOrder = "DESC"
	ListOrderAsc ListOrder = "ASC"
)

Просмотреть файл

@ -0,0 +1,6 @@
package models
type LogSyncStatus struct {
LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"`
LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"`
}

Просмотреть файл

@ -0,0 +1,11 @@
package models
type NodeType struct {
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"`
NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"`
Description string `json:"description,omitempty" url:"description,omitempty"`
InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"`
IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"`
NodeInfo ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
type ParameterPair struct {
Key string `json:"key,omitempty" url:"key,omitempty"`
Value string `json:"value,omitempty" url:"value,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
type SparkConfPair struct {
Key string `json:"key,omitempty" url:"key,omitempty"`
Value string `json:"value,omitempty" url:"value,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package models
type SparkNode struct {
PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"`
PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"`
NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"`
InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"`
StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"`
HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
type SparkVersion struct {
Key string `json:"key,omitempty" url:"key,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
}

Просмотреть файл

@ -0,0 +1,20 @@
package models
type TerminationCode string
const (
TerminationCodeUserRequest = "USER_REQUEST"
TerminationCodeJobFinished = "JOB_FINISHED"
TerminationCodeInactivity = "INACTIVITY"
TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN"
TerminationCodeCommunicationLost = "COMMUNICATION_LOST"
TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE"
TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE"
TerminationCodeInvalidArgument = "INVALID_ARGUMENT"
TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE"
TerminationCodeInternalError = "INTERNAL_ERROR"
TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE"
TerminationCodeRequestRejected = "REQUEST_REJECTED"
TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE"
TerminationCodeTrialExpired = "TRIAL_EXPIRED"
)

Просмотреть файл

@ -0,0 +1,12 @@
package models
type TerminationParameter string
const (
TerminationParameterUsername = "username"
TerminationParameterDatabricksErrorMessage = "databricks_error_message"
TerminationParameterInactivityDurationMin = "inactivity_duration_min"
TerminationParameterInstanceID = "instance_id"
TerminationParameterAzureErrorCode = "azure_error_code"
TerminationParameterAzureErrorMessage = "azure_error_message"
)

Просмотреть файл

@ -0,0 +1,7 @@
package models
// TerminationReason describes why and how a cluster was terminated.
type TerminationReason struct {
	// Code is the status code of the termination (see TerminationCode constants).
	Code TerminationCode `json:"code,omitempty" url:"code,omitempty"`
	// Type classifies the termination outcome (see TerminationType constants).
	// Fix: the url tag previously read "code" — a copy-paste of the field above —
	// which would have serialized Type under the wrong query-parameter name.
	Type TerminationType `json:"type,omitempty" url:"type,omitempty"`
	// Parameters carries extra key/value context about the termination
	// (keys enumerated by TerminationParameter).
	Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package models
// TerminationType classifies the overall outcome of a cluster termination.
type TerminationType string

// Termination types. The TerminationType-prefixed constants follow the naming
// convention used by every other enum in this package (TerminationCode*,
// ListOrder*, ...); the unprefixed names are kept as aliases so existing
// callers keep compiling.
const (
	TerminationTypeSuccess      TerminationType = "SUCCESS"
	TerminationTypeClientError  TerminationType = "CLIENT_ERROR"
	TerminationTypeServiceFault TerminationType = "SERVICE_FAULT"
	TerminationTypeCloudFailure TerminationType = "CLOUD_FAILURE"

	// Deprecated: use the TerminationType-prefixed constants above.
	Success      = "SUCCESS"
	ClientError  = "CLIENT_ERROR"
	ServiceFault = "SERVICE_FAULT"
	CloudFailure = "CLOUD_FAILURE"
)

106
azure/dbfs.go Normal file
Просмотреть файл

@ -0,0 +1,106 @@
package azure
import (
"encoding/json"
"net/http"
"github.com/polar-rams/databricks-sdk-golang/azure/dbfs/httpmodels"
)
// DbfsAPI exposes the DBFS (Databricks File System) API endpoints.
type DbfsAPI struct {
	Client DBClient
}

// init attaches the given client and returns the configured API value.
// The value receiver means the caller's original DbfsAPI is left untouched.
func (a DbfsAPI) init(client DBClient) DbfsAPI {
	a.Client = client
	return a
}

// AddBlock appends a block of data to the stream specified by the input handle
// (POST /dbfs/add-block).
func (a DbfsAPI) AddBlock(req httpmodels.AddBlockReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/add-block", req, nil)
	return err
}

// Close closes the stream specified by the input handle (POST /dbfs/close).
func (a DbfsAPI) Close(req httpmodels.CloseReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", req, nil)
	return err
}

// Create opens a stream to write to a file and returns a handle to this stream
// (POST /dbfs/create).
func (a DbfsAPI) Create(req httpmodels.CreateReq) (httpmodels.CreateResp, error) {
	var resp httpmodels.CreateResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// Delete deletes the file or directory (optionally recursively delete all
// files in the directory) via POST /dbfs/delete.
func (a DbfsAPI) Delete(req httpmodels.DeleteReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", req, nil)
	return err
}

// GetStatus gets the file information of a file or directory
// (GET /dbfs/get-status).
func (a DbfsAPI) GetStatus(req httpmodels.GetStatusReq) (httpmodels.GetStatusResp, error) {
	var resp httpmodels.GetStatusResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// List lists the contents of a directory, or details of the file
// (GET /dbfs/list).
func (a DbfsAPI) List(req httpmodels.ListReq) (httpmodels.ListResp, error) {
	var resp httpmodels.ListResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// Mkdirs creates the given directory and necessary parent directories if they
// do not exist (POST /dbfs/mkdirs).
func (a DbfsAPI) Mkdirs(req httpmodels.MkdirsReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", req, nil)
	return err
}

// Move moves a file from one location to another location within DBFS
// (POST /dbfs/move).
func (a DbfsAPI) Move(req httpmodels.MoveReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/move", req, nil)
	return err
}

// Put uploads a file through the use of multipart form post (POST /dbfs/put).
func (a DbfsAPI) Put(req httpmodels.PutReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", req, nil)
	return err
}

// Read returns the contents of a file (GET /dbfs/read).
func (a DbfsAPI) Read(req httpmodels.ReadReq) (httpmodels.ReadResp, error) {
	var resp httpmodels.ReadResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

Просмотреть файл

@ -0,0 +1,6 @@
package httpmodels
// AddBlockReq is the request for /dbfs/add-block: appends Data to the open
// stream identified by Handle.
type AddBlockReq struct {
	Handle int64  `json:"handle,omitempty" url:"handle,omitempty"`
	// Data is the block content — presumably base64-encoded per the DBFS API;
	// confirm against the caller before relying on this.
	Data string `json:"data,omitempty" url:"data,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// CloseReq is the request for /dbfs/close: closes the stream with this Handle.
type CloseReq struct {
	Handle int64 `json:"handle,omitempty" url:"handle,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package httpmodels
// CreateReq is the request for /dbfs/create: opens a write stream at Path,
// optionally overwriting an existing file.
type CreateReq struct {
	Path      string `json:"path,omitempty" url:"path,omitempty"`
	Overwrite bool   `json:"overwrite,omitempty" url:"overwrite,omitempty"`
}

// CreateResp is the response of /dbfs/create: the handle for subsequent
// add-block/close calls.
type CreateResp struct {
	Handle int64 `json:"handle,omitempty" url:"handle,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package httpmodels
// DeleteReq is the request for /dbfs/delete; Recursive deletes directory contents.
type DeleteReq struct {
	Path      string `json:"path,omitempty" url:"path,omitempty"`
	Recursive bool   `json:"recursive,omitempty" url:"recursive,omitempty"`
}

Просмотреть файл

@ -0,0 +1,12 @@
package httpmodels
// GetStatusReq is the request for /dbfs/get-status.
type GetStatusReq struct {
	Path string `json:"path,omitempty" url:"path,omitempty"`
}

// GetStatusResp is the response of /dbfs/get-status: metadata for one path.
type GetStatusResp struct {
	Path             string `json:"path,omitempty" url:"path,omitempty"`
	IsDir            bool   `json:"is_dir,omitempty" url:"is_dir,omitempty"`
	FileSize         int64  `json:"file_size,omitempty" url:"file_size,omitempty"`
	ModificationTime int64  `json:"modification_time,omitempty" url:"modification_time,omitempty"`
}

Просмотреть файл

@ -0,0 +1,13 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/dbfs/models"
)
// ListReq is the request for /dbfs/list.
type ListReq struct {
	Path string `json:"path,omitempty" url:"path,omitempty"`
}

// ListResp is the response of /dbfs/list: the entries under the requested path.
type ListResp struct {
	Files *[]models.FileInfo `json:"files,omitempty" url:"files,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// MkdirsReq is the request for /dbfs/mkdirs: creates Path and missing parents.
type MkdirsReq struct {
	Path string `json:"path,omitempty" url:"path,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package httpmodels
// MoveReq is the request for /dbfs/move: moves SourcePath to DestinationPath.
type MoveReq struct {
	SourcePath      string `json:"source_path,omitempty" url:"source_path,omitempty"`
	DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package httpmodels
// PutReq is the request for /dbfs/put: uploads Contents to Path in one call.
type PutReq struct {
	Path string `json:"path,omitempty" url:"path,omitempty"`
	// Contents is the file body — presumably base64-encoded per the DBFS API.
	Contents  string `json:"contents,omitempty" url:"contents,omitempty"`
	Overwrite bool   `json:"overwrite,omitempty" url:"overwrite,omitempty"`
}

Просмотреть файл

@ -0,0 +1,12 @@
package httpmodels
// ReadReq is the request for /dbfs/read: reads up to Length bytes at Offset.
type ReadReq struct {
	Path   string `json:"path,omitempty" url:"path,omitempty"`
	Offset int64  `json:"offset,omitempty" url:"offset,omitempty"`
	Length int64  `json:"length,omitempty" url:"length,omitempty"`
}

// ReadResp is the response of /dbfs/read.
type ReadResp struct {
	BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"`
	// Data is the returned content — presumably base64-encoded per the DBFS API.
	Data string `json:"data,omitempty" url:"data,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package models
// FileInfo describes a single DBFS file or directory entry.
type FileInfo struct {
	Path             string `json:"path,omitempty" url:"path,omitempty"`
	IsDir            bool   `json:"is_dir,omitempty" url:"is_dir,omitempty"`
	FileSize         int64  `json:"file_size,omitempty" url:"file_size,omitempty"`
	ModificationTime int64  `json:"modification_time,omitempty" url:"modification_time,omitempty"`
}

88
azure/groups.go Normal file
Просмотреть файл

@ -0,0 +1,88 @@
package azure
import (
"encoding/json"
"net/http"
"github.com/polar-rams/databricks-sdk-golang/azure/groups/httpmodels"
)
// GroupsAPI exposes the Groups API endpoints.
type GroupsAPI struct {
	Client DBClient
}

// init attaches the given client and returns the configured API value.
// The value receiver means the caller's original GroupsAPI is left untouched.
func (a GroupsAPI) init(client DBClient) GroupsAPI {
	a.Client = client
	return a
}

// AddMember adds a user or group to a group (POST /groups/add-member).
func (a GroupsAPI) AddMember(req httpmodels.AddMemberReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", req, nil)
	return err
}

// Create creates a new group with the given name (POST /groups/create).
func (a GroupsAPI) Create(req httpmodels.CreateReq) (httpmodels.CreateResp, error) {
	var resp httpmodels.CreateResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/groups/create", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// ListMembers returns all of the members of a particular group
// (GET /groups/list-members).
func (a GroupsAPI) ListMembers(req httpmodels.ListMembersReq) (httpmodels.ListMembersResp, error) {
	var resp httpmodels.ListMembersResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// List returns all of the groups in an organization (GET /groups/list).
func (a GroupsAPI) List() (httpmodels.ListResp, error) {
	var resp httpmodels.ListResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// ListParents retrieves all groups in which a given user or group is a member
// (GET /groups/list-parents).
func (a GroupsAPI) ListParents(listParentsReq httpmodels.ListParentsReq) (httpmodels.ListParentsResp, error) {
	var resp httpmodels.ListParentsResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/groups/list-parents", listParentsReq, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RemoveMember removes a user or group from a group (POST /groups/remove-member).
func (a GroupsAPI) RemoveMember(removeMemberReq httpmodels.RemoveMemberReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", removeMemberReq, nil)
	return err
}

// Delete removes a group from this organization (POST /groups/delete).
func (a GroupsAPI) Delete(deleteReq httpmodels.DeleteReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/groups/delete", deleteReq, nil)
	return err
}

Просмотреть файл

@ -0,0 +1,7 @@
package httpmodels
// AddMemberReq is the request for /groups/add-member: adds either UserName or
// GroupName (one of the two) to the parent group ParentName.
type AddMemberReq struct {
	UserName   string `json:"user_name,omitempty" url:"user_name,omitempty"`
	GroupName  string `json:"group_name,omitempty" url:"group_name,omitempty"`
	ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
}

Просмотреть файл

@ -0,0 +1,9 @@
package httpmodels
// CreateReq is the request for /groups/create.
type CreateReq struct {
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

// CreateResp is the response of /groups/create: echoes the created group name.
type CreateResp struct {
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// DeleteReq is the request for /groups/delete.
type DeleteReq struct {
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// ListResp is the response of /groups/list: all group names in the organization.
type ListResp struct {
	GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
}

Просмотреть файл

@ -0,0 +1,11 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/groups/models"
// ListMembersReq is the request for /groups/list-members.
type ListMembersReq struct {
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

// ListMembersResp is the response of /groups/list-members: the group's members.
type ListMembersResp struct {
	Members *[]models.PrincipalName `json:"members,omitempty" url:"members,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package httpmodels
// ListParentsReq is the request for /groups/list-parents: identifies the
// member (either UserName or GroupName) whose parent groups are wanted.
type ListParentsReq struct {
	UserName  string `json:"user_name,omitempty" url:"user_name,omitempty"`
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

// ListParentsResp is the response of /groups/list-parents.
type ListParentsResp struct {
	GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
}

Просмотреть файл

@ -0,0 +1,7 @@
package httpmodels
// RemoveMemberReq is the request for /groups/remove-member: removes either
// UserName or GroupName (one of the two) from the parent group ParentName.
type RemoveMemberReq struct {
	UserName   string `json:"user_name,omitempty" url:"user_name,omitempty"`
	GroupName  string `json:"group_name,omitempty" url:"group_name,omitempty"`
	ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// PrincipalName identifies a group member: either a user (UserName) or a
// nested group (GroupName).
type PrincipalName struct {
	UserName  string `json:"user_name,omitempty" url:"user_name,omitempty"`
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

69
azure/instance_pools.go Normal file
Просмотреть файл

@ -0,0 +1,69 @@
package azure
import (
"encoding/json"
"net/http"
"github.com/polar-rams/databricks-sdk-golang/azure/instance_pools/httpmodels"
)
// InstancePoolsAPI exposes the Instance Pools API endpoints.
type InstancePoolsAPI struct {
	Client DBClient
}

// init attaches the given client and returns the configured API value.
// The value receiver means the caller's original InstancePoolsAPI is left untouched.
func (a InstancePoolsAPI) init(client DBClient) InstancePoolsAPI {
	a.Client = client
	return a
}

// Create creates an instance pool (POST /instance-pools/create).
func (a InstancePoolsAPI) Create(req httpmodels.CreateReq) (httpmodels.CreateResp, error) {
	var resp httpmodels.CreateResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/instance-pools/create", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// Edit modifies the configuration of an existing instance pool
// (POST /instance-pools/edit).
func (a InstancePoolsAPI) Edit(req httpmodels.EditReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/instance-pools/edit", req, nil)
	return err
}

// Delete permanently deletes the instance pool (POST /instance-pools/delete).
func (a InstancePoolsAPI) Delete(req httpmodels.DeleteReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/instance-pools/delete", req, nil)
	return err
}

// Get retrieves the information for an instance pool given its identifier
// (GET /instance-pools/get).
func (a InstancePoolsAPI) Get(req httpmodels.GetReq) (httpmodels.GetResp, error) {
	var resp httpmodels.GetResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/instance-pools/get", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// List returns information for all instance pools (GET /instance-pools/list).
func (a InstancePoolsAPI) List() (httpmodels.ListResp, error) {
	var resp httpmodels.ListResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/instance-pools/list", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

Просмотреть файл

@ -0,0 +1,26 @@
package httpmodels
import (
clustersModels "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
"github.com/polar-rams/databricks-sdk-golang/azure/instance_pools/models"
)
// CreateReq is the request for /instance-pools/create.
type CreateReq struct {
	InstancePoolName                   string                         `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"`
	MinIdleInstances                   int32                          `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"`
	MaxCapacity                        int32                          `json:"max_capacity,omitempty" url:"max_capacity,omitempty"`
	NodeTypeID                         string                         `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	CustomTags                         *[]clustersModels.ClusterTag   `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
	IdleInstanceAutoterminationMinutes int32                          `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"`
	EnableElasticDisk                  bool                           `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
	DiskSpec                           models.DiskSpec                `json:"disk_spec,omitempty" url:"disk_spec,omitempty"`
	PreloadedSparkVersions             *[]string                      `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"`
	PreloadedDockerImages              *[]clustersModels.DockerImage  `json:"preloaded_docker_images,omitempty" url:"preloaded_docker_images,omitempty"`
	AzureAttributes                    models.InstancePoolAzureAttributes `json:"azure_attributes,omitempty" url:"azure_attributes,omitempty"`
}

// CreateResp is the response of /instance-pools/create: the new pool's ID.
type CreateResp struct {
	InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package httpmodels
// DeleteReq is the request for /instance-pools/delete.
type DeleteReq struct {
	InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,11 @@
package httpmodels
// EditReq is the request for /instance-pools/edit: modifies the mutable
// settings of the pool identified by InstancePoolID.
type EditReq struct {
	InstancePoolID                     string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
	InstancePoolName                   string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"`
	MinIdleInstances                   int32  `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"`
	MaxCapacity                        int32  `json:"max_capacity,omitempty" url:"max_capacity,omitempty"`
	NodeTypeID                         string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	IdleInstanceAutoterminationMinutes int32  `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"`
}

Просмотреть файл

@ -0,0 +1,28 @@
package httpmodels
import (
clustersModels "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
"github.com/polar-rams/databricks-sdk-golang/azure/instance_pools/models"
)
// GetReq is the request for /instance-pools/get.
type GetReq struct {
	InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
}

// GetResp is the response of /instance-pools/get: the pool's configuration,
// current state, usage stats and pending-instance status.
type GetResp struct {
	InstancePoolName                   string                       `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"`
	MinIdleInstances                   int32                        `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"`
	MaxCapacity                        int32                        `json:"max_capacity,omitempty" url:"max_capacity,omitempty"`
	NodeTypeID                         string                       `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	CustomTags                         *[]clustersModels.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
	IdleInstanceAutoterminationMinutes int32                        `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"`
	EnableElasticDisk                  bool                         `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
	DiskSpec                           models.DiskSpec              `json:"disk_spec,omitempty" url:"disk_spec,omitempty"`
	PreloadedSparkVersions             *[]string                    `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"`
	DefaultTags                        *[]clustersModels.ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"`
	State                              models.InstancePoolState     `json:"state,omitempty" url:"state,omitempty"`
	Stats                              models.InstancePoolStats     `json:"stats,omitempty" url:"stats,omitempty"`
	Status                             models.InstancePoolStatus    `json:"status,omitempty" url:"status,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/instance_pools/models"
)
// ListResp is the response of GET /instance-pools/list: every pool with its
// attributes and embedded usage stats.
// Fix: the element type was models.InstancePoolStats, which contains only the
// four usage counters and would silently drop every pool attribute (name, ID,
// node type, ...) on unmarshal; the list endpoint returns full pool objects,
// modeled by models.InstancePoolAndStats (previously unused).
type ListResp struct {
	InstancePools *[]models.InstancePoolAndStats `json:"instance_pools,omitempty" url:"instance_pools,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package models
// AzureDiskVolumeType is the Azure managed-disk storage tier for pool disks.
type AzureDiskVolumeType string

// Supported Azure disk volume types.
const (
	AzureDiskVolumeTypePremiumLRS  = "PREMIUM_LRS"
	AzureDiskVolumeTypeStandardLRS = "STANDARD_LRS"
)

Просмотреть файл

@ -0,0 +1,7 @@
package models
// DiskSpec describes the disks attached to each instance in a pool.
type DiskSpec struct {
	DiskType  DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
	DiskCount int32    `json:"disk_count,omitempty" url:"disk_count,omitempty"`
	// DiskSize is the size per disk — unit not visible here; presumably GB.
	DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package models
// DiskType wraps the cloud-specific disk type (Azure only in this SDK flavor).
type DiskType struct {
	AzureDiskVolumeType AzureDiskVolumeType `json:"azure_disk_volume_type,omitempty" url:"azure_disk_volume_type,omitempty"`
}

Просмотреть файл

@ -0,0 +1,21 @@
package models
import (
clusterModels "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// InstancePoolAndStats is a pool's full configuration together with its
// current state and usage statistics, as returned by list-style endpoints.
type InstancePoolAndStats struct {
	InstancePoolName                   string                      `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"`
	MinIdleInstances                   int32                       `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"`
	MaxCapacity                        int32                       `json:"max_capacity,omitempty" url:"max_capacity,omitempty"`
	NodeTypeID                         string                      `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	CustomTags                         *[]clusterModels.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
	IdleInstanceAutoterminationMinutes int32                       `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"`
	EnableElasticDisk                  bool                        `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
	DiskSpec                           DiskSpec                    `json:"disk_spec,omitempty" url:"disk_spec,omitempty"`
	PreloadedSparkVersions             *[]string                   `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"`
	InstancePoolID                     string                      `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
	DefaultTags                        *[]clusterModels.ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"`
	State                              InstancePoolState           `json:"state,omitempty" url:"state,omitempty"`
	Stats                              InstancePoolStats           `json:"stats,omitempty" url:"stats,omitempty"`
}

Просмотреть файл

@ -0,0 +1,10 @@
package models
import (
clusterModels "github.com/polar-rams/databricks-sdk-golang/azure/clusters/models"
)
// InstancePoolAzureAttributes holds Azure-specific pool settings: instance
// availability (spot/on-demand) and the maximum spot bid price.
type InstancePoolAzureAttributes struct {
	Availability    clusterModels.AzureAvailability `json:"availability,omitempty" url:"availability,omitempty"`
	SpotBidMaxPrice float64                         `json:"spot_bid_max_price,omitempty" url:"spot_bid_max_price,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package models
// InstancePoolState is the lifecycle state of an instance pool.
type InstancePoolState string

// Instance pool states.
const (
	InstancePoolStateActive  = "ACTIVE"
	InstancePoolStateDeleted = "DELETED"
)

Просмотреть файл

@ -0,0 +1,8 @@
package models
// InstancePoolStats counts the instances of a pool by usage: in-use vs idle,
// each split into ready and still-pending.
type InstancePoolStats struct {
	UsedCount        int32 `json:"used_count,omitempty" url:"used_count,omitempty"`
	IdleCount        int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"`
	PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"`
	PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package models
// InstancePoolStatus reports failures for instances that are pending creation.
type InstancePoolStatus struct {
	PendingInstanceErrors *[]PendingInstanceError `json:"pending_instance_errors,omitempty" url:"pending_instance_errors,omitempty"`
}

Просмотреть файл

@ -0,0 +1,6 @@
package models
// PendingInstanceError is an error message for one failed pending instance.
type PendingInstanceError struct {
	InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"`
	Message    string `json:"message,omitempty" url:"message,omitempty"`
}

165
azure/jobs.go Normal file
Просмотреть файл

@ -0,0 +1,165 @@
package azure
import (
"encoding/json"
"net/http"
"github.com/polar-rams/databricks-sdk-golang/azure/jobs/httpmodels"
)
// JobsAPI exposes the Jobs API endpoints.
type JobsAPI struct {
	Client DBClient
}

// init attaches the given client and returns the configured API value.
// The value receiver means the caller's original JobsAPI is left untouched.
func (a JobsAPI) init(client DBClient) JobsAPI {
	a.Client = client
	return a
}

// Create creates a new job (POST /jobs/create).
func (a JobsAPI) Create(req httpmodels.CreateReq) (httpmodels.CreateResp, error) {
	var resp httpmodels.CreateResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// List lists all jobs (GET /jobs/list).
func (a JobsAPI) List() (httpmodels.ListResp, error) {
	var resp httpmodels.ListResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// Delete deletes a job by ID (POST /jobs/delete).
func (a JobsAPI) Delete(req httpmodels.DeleteReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/jobs/delete", req, nil)
	return err
}

// Get gets a job by ID (GET /jobs/get).
func (a JobsAPI) Get(req httpmodels.GetReq) (httpmodels.GetResp, error) {
	var resp httpmodels.GetResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// Reset overwrites all job settings (POST /jobs/reset).
func (a JobsAPI) Reset(req httpmodels.ResetReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", req, nil)
	return err
}

// Update adds, changes, or removes specific settings of an existing job
// (POST /jobs/update).
func (a JobsAPI) Update(req httpmodels.UpdateReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/jobs/update", req, nil)
	return err
}

// RunNow runs a job now and returns the run_id of the triggered run
// (POST /jobs/run-now).
func (a JobsAPI) RunNow(req httpmodels.RunNowReq) (httpmodels.RunNowResp, error) {
	var resp httpmodels.RunNowResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsSubmit submits a one-time run (POST /jobs/runs/submit).
func (a JobsAPI) RunsSubmit(req httpmodels.RunsSubmitReq) (httpmodels.RunsSubmitResp, error) {
	var resp httpmodels.RunsSubmitResp
	jsonResp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsList lists runs from most recently started to least
// (GET /jobs/runs/list).
func (a JobsAPI) RunsList(req httpmodels.RunsListReq) (httpmodels.RunsListResp, error) {
	var resp httpmodels.RunsListResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsGet retrieves the metadata of a run (GET /jobs/runs/get).
func (a JobsAPI) RunsGet(req httpmodels.RunsGetReq) (httpmodels.RunsGetResp, error) {
	var resp httpmodels.RunsGetResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsExport exports and retrieves the job run task (GET /jobs/runs/export).
func (a JobsAPI) RunsExport(req httpmodels.RunsExportReq) (httpmodels.RunsExportResp, error) {
	var resp httpmodels.RunsExportResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsCancel cancels a run (POST /jobs/runs/cancel).
func (a JobsAPI) RunsCancel(req httpmodels.RunsCancelReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", req, nil)
	return err
}

// RunsGetOutput retrieves the output of a run (GET /jobs/runs/get-output).
func (a JobsAPI) RunsGetOutput(req httpmodels.RunsGetOutputReq) (httpmodels.RunsGetOutputResp, error) {
	var resp httpmodels.RunsGetOutputResp
	jsonResp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", req, nil)
	if err != nil {
		return resp, err
	}
	err = json.Unmarshal(jsonResp, &resp)
	return resp, err
}

// RunsDelete deletes a non-active run; returns an error if the run is active
// (POST /jobs/runs/delete).
func (a JobsAPI) RunsDelete(req httpmodels.RunsDeleteReq) error {
	_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/delete", req, nil)
	return err
}

Просмотреть файл

@ -0,0 +1,28 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
libraryModels "github.com/polar-rams/databricks-sdk-golang/azure/libraries/models"
)
// CreateReq is the request for POST /jobs/create. Exactly one of
// ExistingCluster / NewCluster, and exactly one task field, should be set.
type CreateReq struct {
	// ExistingCluster is the ID of an existing cluster to run the job on.
	// Fix: the tags previously read "run_id" — a copy-paste from the run
	// models — so the cluster ID was serialized under the wrong key and the
	// Jobs API would ignore it. The API expects "existing_cluster_id".
	// NOTE(review): consider renaming the field to ExistingClusterID in the
	// next breaking release for Go initialism consistency.
	ExistingCluster        string                        `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
	NewCluster             *models.NewCluster            `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
	NotebookTask           *models.NotebookTask          `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
	SparkJarTask           *models.SparkJarTask          `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
	SparkPythonTask        *models.SparkPythonTask       `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
	SparkSubmitTask        *models.SparkSubmitTask       `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
	Name                   string                        `json:"name,omitempty" url:"name,omitempty"`
	Libraries              *[]libraryModels.Library      `json:"libraries,omitempty" url:"libraries,omitempty"`
	EmailNotifications     *models.JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"`
	TimeoutSeconds         int32                         `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
	MaxRetries             int32                         `json:"max_retries,omitempty" url:"max_retries,omitempty"`
	MinRetryIntervalMillis int32                         `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                          `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
	Schedule               *models.CronSchedule          `json:"schedule,omitempty" url:"schedule,omitempty"`
	MaxConcurrentRuns      int32                         `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
}

// CreateResp is the response of POST /jobs/create: the new job's ID.
type CreateResp struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// DeleteReq is the request for /jobs/delete.
type DeleteReq struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,14 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
// GetReq is the request for /jobs/get.
type GetReq struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
}

// GetResp is the response of /jobs/get: the job's identity, owner, settings
// and creation time.
type GetResp struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
	// NOTE(review): field name is misspelled (should be CreatorUserName);
	// the JSON tag is correct, but renaming the exported field is a breaking
	// change, so it is only flagged here.
	CreateorUserName string              `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
	Settings         *models.JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
	CreatedTime      int64               `json:"created_time,omitempty" url:"created_time,omitempty"`
}

Просмотреть файл

@ -0,0 +1,9 @@
package httpmodels
import (
"github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
)
// ListResp is the response of /jobs/list: all jobs in the workspace.
type ListResp struct {
	Jobs *[]models.Job `json:"jobs,omitempty" url:"jobs,omitempty"`
}

Просмотреть файл

@ -0,0 +1,8 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
// ResetReq is the request for /jobs/reset: replaces the job's settings wholesale.
type ResetReq struct {
	JobID       int64               `json:"job_id,omitempty" url:"job_id,omitempty"`
	NewSettings *models.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
}

Просмотреть файл

@ -0,0 +1,14 @@
package httpmodels
type RunNowReq struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
JarParams *[]string `json:"jar_params,omitempty" url:"jar_params,omitempty"`
NotebookParams *map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"`
PythonParams *[]string `json:"python_params,omitempty" url:"python_params,omitempty"`
SparkSubmitParams *[]string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"`
}
// RunNowResp is the deserialized response body of a run-now request.
type RunNowResp struct {
	// RunID is the identifier of the newly triggered run.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
	// NumberInJob is this run's sequence number within its job.
	// NOTE(review): semantics inferred from the name — confirm against the
	// Jobs API documentation.
	NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// RunsCancelReq is the request payload for cancelling a run by its ID.
type RunsCancelReq struct {
	// RunID identifies the run to cancel.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,5 @@
package httpmodels
// RunsDeleteReq is the request payload for deleting a run by its ID.
type RunsDeleteReq struct {
	// RunID identifies the run to delete.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}

Просмотреть файл

@ -0,0 +1,12 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
// RunsExportReq is the request payload for exporting the rendered views of a run.
type RunsExportReq struct {
	// RunID identifies the run to export.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
	// ViewsToExport selects which views of the run are exported.
	ViewsToExport models.ViewsToExport `json:"views_to_export,omitempty" url:"views_to_export,omitempty"`
}
// RunsExportResp is the deserialized response body of a runs-export request.
type RunsExportResp struct {
	// Views is the collection of exported view items for the run.
	Views *[]models.ViewItem `json:"views,omitempty" url:"views,omitempty"`
}

Просмотреть файл

@ -0,0 +1,28 @@
package httpmodels
import "github.com/polar-rams/databricks-sdk-golang/azure/jobs/models"
// RunsGetReq is the request payload for retrieving the metadata of a run.
type RunsGetReq struct {
	// RunID identifies the run to fetch.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}
// RunsGetResp is the deserialized response body of a runs-get request,
// describing a single run's identity, state, cluster, parameters, and timing.
//
// NOTE(review): `omitempty` has no effect on non-pointer struct fields
// (State, Schedule, ClusterInstance) — encoding/json always emits them.
// Consider pointers if omission on the wire matters; kept as-is here.
type RunsGetResp struct {
	// JobID is the job this run belongs to.
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
	// RunID is the identifier of this run.
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
	// NumberInJob is this run's sequence number within its job.
	NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"`
	// OriginalAttemptRunID is the run ID of the first attempt — presumably
	// equal to RunID unless the run was retried; confirm against API docs.
	OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"`
	// State is the run's current lifecycle/result state.
	State models.RunState `json:"state,omitempty" url:"state,omitempty"`
	// Schedule is the cron schedule that triggered the run, if any.
	Schedule models.CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"`
	// Task is the task executed by the run.
	Task *models.JobTask `json:"task,omitempty" url:"task,omitempty"`
	// ClusterSpec is the cluster configuration requested for the run.
	ClusterSpec *models.ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"`
	// ClusterInstance identifies the cluster the run actually executed on.
	ClusterInstance models.ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"`
	// OverridingParameters are run-level parameter overrides, if supplied.
	OverridingParameters *models.RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"`
	// StartTime is when the run started.
	// NOTE(review): units for the timestamp/duration fields below (ms vs s)
	// are not visible here — confirm against the Jobs API documentation.
	StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
	// EndTime is when the run finished.
	EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"`
	// SetupDuration is the time spent setting up the run's cluster.
	SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"`
	// ExecutionDuration is the time spent executing the task.
	ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"`
	// CleanupDuration is the time spent tearing down after execution.
	CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"`
	// Trigger is what caused the run (e.g. scheduled vs manual).
	Trigger models.TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"`
	// CreatorUserName is the user who created the owning job.
	CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
	// RunPageURL is the URL of the run's detail page in the workspace UI.
	RunPageURL string `json:"run_page_url,omitempty" url:"run_page_url,omitempty"`
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше