Merge branch 'main' into gergo/objects_init

This commit is contained in:
KatKatKateryna 2024-02-12 13:00:00 +00:00
Parent 589198f5f1 d60feb73a2
Commit a2daa68c1c
63 changed files with 2357 additions and 997 deletions

View file

@ -1,14 +1,47 @@
version: 2.1
orbs:
python: circleci/python@2.0.3
codecov: codecov/codecov@3.2.2
codecov: codecov/codecov@3.3.0
jobs:
pre-commit:
parameters:
config_file:
default: ./.pre-commit-config.yaml
description: Optional, path to pre-commit config file.
type: string
cache_prefix:
default: ''
description: |
Optional cache prefix to be used on CircleCI. Can be used for cache busting or to ensure multiple jobs use different caches.
type: string
docker:
- image: speckle/pre-commit-runner:latest
resource_class: medium
steps:
- checkout
- restore_cache:
keys:
- cache-pre-commit-<<parameters.cache_prefix>>-{{ checksum "<<parameters.config_file>>" }}
- run:
name: Install pre-commit hooks
command: pre-commit install-hooks --config <<parameters.config_file>>
- save_cache:
key: cache-pre-commit-<<parameters.cache_prefix>>-{{ checksum "<<parameters.config_file>>" }}
paths:
- ~/.cache/pre-commit
- run:
name: Run pre-commit
command: pre-commit run --all-files
- run:
command: git --no-pager diff
name: git diff
when: on_fail
test:
machine:
image: ubuntu-2204:2023.02.1
docker_layer_caching: true
docker_layer_caching: false
resource_class: medium
parameters:
tag:
@ -52,6 +85,10 @@ jobs:
workflows:
main:
jobs:
- pre-commit:
filters:
tags:
only: /.*/
- test:
matrix:
parameters:
@ -62,6 +99,7 @@ workflows:
- deploy:
context: pypi
requires:
- pre-commit
- test
filters:
tags:

View file

@ -2,7 +2,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
hooks:
- id: ruff
rev: v0.0.186
rev: v0.1.6
- repo: https://github.com/commitizen-tools/commitizen
hooks:
@ -10,15 +10,15 @@ repos:
- id: commitizen-branch
stages:
- push
rev: v2.38.0
rev: v3.13.0
- repo: https://github.com/pycqa/isort
rev: v5.11.3
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 22.12.0
rev: 23.11.0
hooks:
- id: black
# It is recommended to specify the latest version of Python
@ -27,7 +27,7 @@ repos:
# https://pre-commit.com/#top_level-default_language_version
# language_version: python3.11
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace

View file

@ -33,7 +33,7 @@ services:
retries: 30
minio:
image: "minio/minio"
image: "minio/minio:RELEASE.2023-10-25T06-33-25Z"
command: server /data --console-address ":9001"
restart: always
volumes:
@ -53,7 +53,7 @@ services:
# Speckle Server
#######
speckle-frontend:
image: speckle/speckle-frontend:2
image: speckle/speckle-frontend:latest
restart: always
ports:
- "0.0.0.0:8080:8080"
@ -61,7 +61,7 @@ services:
FILE_SIZE_LIMIT_MB: 100
speckle-server:
image: speckle/speckle-server:2
image: speckle/speckle-server:latest
restart: always
healthcheck:
test:
@ -112,7 +112,7 @@ services:
ENABLE_MP: "false"
preview-service:
image: speckle/speckle-preview-service:2
image: speckle/speckle-preview-service:latest
restart: always
depends_on:
speckle-server:
@ -124,7 +124,7 @@ services:
PG_CONNECTION_STRING: "postgres://speckle:speckle@postgres/speckle"
webhook-service:
image: speckle/speckle-webhook-service:2
image: speckle/speckle-webhook-service:latest
restart: always
depends_on:
speckle-server:
@ -135,7 +135,7 @@ services:
WAIT_HOSTS: postgres:5432
fileimport-service:
image: speckle/speckle-fileimport-service:2
image: speckle/speckle-fileimport-service:latest
restart: always
depends_on:
speckle-server:

poetry.lock (generated): 1245 lines changed

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "specklepy"
version = "2.9.1"
version = "2.17.14"
description = "The Python SDK for Speckle 2.0"
readme = "README.md"
authors = ["Speckle Systems <devops@speckle.systems>"]
@ -10,21 +10,23 @@ documentation = "https://speckle.guide/dev/py-examples.html"
homepage = "https://speckle.systems/"
packages = [
{ include = "specklepy", from = "src" },
{ include = "speckle_automate", from = "src" },
]
[tool.poetry.dependencies]
python = ">=3.7.2, <4.0"
pydantic = "^2.0"
python = ">=3.8.0, <4.0"
pydantic = "^2.5"
appdirs = "^1.4.4"
gql = {extras = ["requests", "websockets"], version = "^3.3.0"}
gql = { extras = ["requests", "websockets"], version = "^3.3.0" }
ujson = "^5.3.0"
Deprecated = "^1.2.13"
stringcase = "^1.2.0"
attrs = "^23.1.0"
httpx = "^0.25.0"
[tool.poetry.group.dev.dependencies]
black = "^22.8.0"
black = "23.11.0"
isort = "^5.7.0"
pytest = "^7.1.3"
pytest-ordering = "^0.6"

View file

@ -0,0 +1,23 @@
"""This module contains an SDK for working with Speckle Automate."""
from speckle_automate.automation_context import AutomationContext
from speckle_automate.runner import execute_automate_function, run_function
from speckle_automate.schema import (
AutomateBase,
AutomationResult,
AutomationRunData,
AutomationStatus,
ObjectResultLevel,
ResultCase,
)
__all__ = [
"AutomationContext",
"AutomateBase",
"AutomationStatus",
"AutomationResult",
"AutomationRunData",
"ResultCase",
"ObjectResultLevel",
"run_function",
"execute_automate_function",
]

View file

@ -0,0 +1,420 @@
"""This module provides an abstraction layer above the Speckle Automate runtime."""
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union
import httpx
from gql import gql
from speckle_automate.schema import (
AutomateBase,
AutomationResult,
AutomationRunData,
AutomationStatus,
ObjectResultLevel,
ResultCase,
)
from specklepy.api import operations
from specklepy.api.client import SpeckleClient
from specklepy.logging.exceptions import SpeckleException
from specklepy.objects import Base
from specklepy.transports.memory import MemoryTransport
from specklepy.transports.server import ServerTransport
@dataclass
class AutomationContext:
"""A context helper class.
This class exposes methods to work with the Speckle Automate context inside
Speckle Automate functions.
An instance of AutomationContext is injected into every run of a function.
"""
automation_run_data: AutomationRunData
speckle_client: SpeckleClient
_server_transport: ServerTransport
_speckle_token: str
#: keep a memory transport at hand, to speed things up if needed
_memory_transport: MemoryTransport = field(default_factory=MemoryTransport)
#: added for performance measuring
_init_time: float = field(default_factory=time.perf_counter)
_automation_result: AutomationResult = field(default_factory=AutomationResult)
@classmethod
def initialize(
cls, automation_run_data: Union[str, AutomationRunData], speckle_token: str
) -> "AutomationContext":
"""Bootstrap the AutomateSDK from raw data.
Todo:
----
* bootstrap a structlog logger instance
* expose a logger that people can use instead of print
"""
# parse the json value if it's not an initialized project data instance
automation_run_data = (
automation_run_data
if isinstance(automation_run_data, AutomationRunData)
else AutomationRunData.model_validate_json(automation_run_data)
)
speckle_client = SpeckleClient(
automation_run_data.speckle_server_url,
automation_run_data.speckle_server_url.startswith("https"),
)
speckle_client.authenticate_with_token(speckle_token)
if not speckle_client.account:
msg = (
f"Could not autenticate to {automation_run_data.speckle_server_url}",
"with the provided token",
)
raise ValueError(msg)
server_transport = ServerTransport(
automation_run_data.project_id, speckle_client
)
return cls(automation_run_data, speckle_client, server_transport, speckle_token)
@property
def run_status(self) -> AutomationStatus:
"""Get the status of the automation run."""
return self._automation_result.run_status
@property
def status_message(self) -> Optional[str]:
"""Get the current status message."""
return self._automation_result.status_message
def elapsed(self) -> float:
"""Return the elapsed time in seconds since the initialization time."""
return time.perf_counter() - self._init_time
def receive_version(self) -> Base:
"""Receive the Speckle project version that triggered this automation run."""
commit = self.speckle_client.commit.get(
self.automation_run_data.project_id, self.automation_run_data.version_id
)
if not commit.referencedObject:
raise ValueError("The commit has no referencedObject, cannot receive it.")
base = operations.receive(
commit.referencedObject, self._server_transport, self._memory_transport
)
print(
f"It took {self.elapsed():.2f} seconds to receive",
f" the speckle version {self.automation_run_data.version_id}",
)
return base
def create_new_version_in_project(
self, root_object: Base, model_name: str, version_message: str = ""
) -> Tuple[str, str]:
"""Save a base model to a new version on the project.
Args:
root_object (Base): The Speckle base object for the new version.
model_name (str): For now please use a `branchName`!
version_message (str): The message for the new version.
"""
if model_name == self.automation_run_data.branch_name:
raise ValueError(
f"The target model: {model_name} cannot match the model"
f" that triggered this automation:"
f" {self.automation_run_data.model_id} /"
f" {self.automation_run_data.branch_name}"
)
branch = self.speckle_client.branch.get(
self.automation_run_data.project_id, model_name, 1
)
# we just check if it exists
if (not branch) or isinstance(branch, SpeckleException):
branch_create = self.speckle_client.branch.create(
self.automation_run_data.project_id,
model_name,
)
if isinstance(branch_create, Exception):
raise branch_create
model_id = branch_create
else:
model_id = branch.id
root_object_id = operations.send(
root_object,
[self._server_transport, self._memory_transport],
use_default_cache=False,
)
version_id = self.speckle_client.commit.create(
stream_id=self.automation_run_data.project_id,
object_id=root_object_id,
branch_name=model_name,
message=version_message,
source_application="SpeckleAutomate",
)
if isinstance(version_id, SpeckleException):
raise version_id
self._automation_result.result_versions.append(version_id)
return model_id, version_id
@property
def context_view(self) -> Optional[str]:
return self._automation_result.result_view
def set_context_view(
self,
# f"{model_id}@{version_id} or {model_id} "
resource_ids: Optional[List[str]] = None,
include_source_model_version: bool = True,
) -> None:
link_resources = (
[
f"{self.automation_run_data.model_id}@{self.automation_run_data.version_id}"
]
if include_source_model_version
else []
)
if resource_ids:
link_resources.extend(resource_ids)
if not link_resources:
raise Exception(
"We do not have enough resource ids to compose a context view"
)
self._automation_result.result_view = (
f"/projects/{self.automation_run_data.project_id}"
f"/models/{','.join(link_resources)}"
)
def report_run_status(self) -> None:
"""Report the current run status to the project of this automation."""
query = gql(
"""
mutation ReportFunctionRunStatus(
$automationId: String!,
$automationRevisionId: String!,
$automationRunId: String!,
$versionId: String!,
$functionId: String!,
$functionName: String!,
$functionLogo: String,
$runStatus: AutomationRunStatus!
$elapsed: Float!
$contextView: String
$resultVersionIds: [String!]!
$statusMessage: String
$objectResults: JSONObject
){
automationMutations {
functionRunStatusReport(input: {
automationId: $automationId
automationRevisionId: $automationRevisionId
automationRunId: $automationRunId
versionId: $versionId
functionRuns: [
{
functionId: $functionId
functionName: $functionName
functionLogo: $functionLogo
status: $runStatus,
contextView: $contextView,
elapsed: $elapsed,
resultVersionIds: $resultVersionIds,
statusMessage: $statusMessage
results: $objectResults
}]
})
}
}
"""
)
if self.run_status in [AutomationStatus.SUCCEEDED, AutomationStatus.FAILED]:
object_results = {
"version": "1.0.0",
"values": {
"objectResults": self._automation_result.model_dump(by_alias=True)[
"objectResults"
],
"blobIds": self._automation_result.blobs,
},
}
else:
object_results = None
params = {
"automationId": self.automation_run_data.automation_id,
"automationRevisionId": self.automation_run_data.automation_revision_id,
"automationRunId": self.automation_run_data.automation_run_id,
"versionId": self.automation_run_data.version_id,
"functionId": self.automation_run_data.function_id,
"functionName": self.automation_run_data.function_name,
"functionLogo": self.automation_run_data.function_logo,
"runStatus": self.run_status.value,
"statusMessage": self._automation_result.status_message,
"contextView": self._automation_result.result_view,
"elapsed": self.elapsed(),
"resultVersionIds": self._automation_result.result_versions,
"objectResults": object_results,
}
print(f"Reporting run status with content: {params}")
self.speckle_client.httpclient.execute(query, params)
def store_file_result(self, file_path: Union[Path, str]) -> None:
"""Save a file attached to the project of this automation."""
path_obj = (
Path(file_path).resolve() if isinstance(file_path, str) else file_path
)
class UploadResult(AutomateBase):
blob_id: str
file_name: str
upload_status: int
class BlobUploadResponse(AutomateBase):
upload_results: list[UploadResult]
if not path_obj.exists():
raise ValueError("The given file path doesn't exist")
files = {path_obj.name: open(str(path_obj), "rb")}
url = (
f"{self.automation_run_data.speckle_server_url}/api/stream/"
f"{self.automation_run_data.project_id}/blob"
)
data = (
httpx.post(
url,
files=files,
headers={"authorization": f"Bearer {self._speckle_token}"},
)
.raise_for_status()
.json()
)
upload_response = BlobUploadResponse.model_validate(data)
if len(upload_response.upload_results) != 1:
raise ValueError("Expecting one upload result.")
self._automation_result.blobs.extend(
[upload_result.blob_id for upload_result in upload_response.upload_results]
)
def mark_run_failed(self, status_message: str) -> None:
"""Mark the current run a failure."""
self._mark_run(AutomationStatus.FAILED, status_message)
def mark_run_success(self, status_message: Optional[str]) -> None:
"""Mark the current run a success with an optional message."""
self._mark_run(AutomationStatus.SUCCEEDED, status_message)
def _mark_run(
self, status: AutomationStatus, status_message: Optional[str]
) -> None:
duration = self.elapsed()
self._automation_result.status_message = status_message
self._automation_result.run_status = status
self._automation_result.elapsed = duration
msg = f"Automation run {status.value} after {duration:.2f} seconds."
print("\n".join([msg, status_message]) if status_message else msg)
def attach_error_to_objects(
self,
category: str,
object_ids: Union[str, List[str]],
message: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
visual_overrides: Optional[Dict[str, Any]] = None,
) -> None:
"""Add a new error case to the run results.
If the error cause has already created an error case,
the error will be extended with a new case referring to the causing objects.
Args:
category (str): A short tag for the error type.
object_ids (Union[str, List[str]]): The id(s) of the object(s) causing the error.
message (Optional[str]): Optional error message.
metadata: User-provided metadata key value pairs.
visual_overrides: Case-specific 3D visual overrides.
"""
self.attach_result_to_objects(
ObjectResultLevel.ERROR,
category,
object_ids,
message,
metadata,
visual_overrides,
)
def attach_warning_to_objects(
self,
category: str,
object_ids: Union[str, List[str]],
message: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
visual_overrides: Optional[Dict[str, Any]] = None,
) -> None:
"""Add a new warning case to the run results."""
self.attach_result_to_objects(
ObjectResultLevel.WARNING,
category,
object_ids,
message,
metadata,
visual_overrides,
)
def attach_info_to_objects(
self,
category: str,
object_ids: Union[str, List[str]],
message: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
visual_overrides: Optional[Dict[str, Any]] = None,
) -> None:
"""Add a new info case to the run results."""
self.attach_result_to_objects(
ObjectResultLevel.INFO,
category,
object_ids,
message,
metadata,
visual_overrides,
)
def attach_result_to_objects(
self,
level: ObjectResultLevel,
category: str,
object_ids: Union[str, List[str]],
message: Optional[str] = None,
metadata: Optional[Dict[str, Any]] = None,
visual_overrides: Optional[Dict[str, Any]] = None,
) -> None:
if isinstance(object_ids, list):
if len(object_ids) < 1:
raise ValueError(
f"Need atleast one object_id to report a(n) {level.value.upper()}"
)
id_list = object_ids
else:
id_list = [object_ids]
print(
f"Created new {level.value.upper()}"
f" category: {category} caused by: {message}"
)
self._automation_result.object_results.append(
ResultCase(
category=category,
level=level,
object_ids=id_list,
message=message,
metadata=metadata,
visual_overrides=visual_overrides,
)
)
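A minimal sketch of how this context is meant to be consumed inside an Automate function; the function name and attribute check below are illustrative, not part of this commit:

from speckle_automate import AutomationContext

def check_version(ctx: AutomationContext) -> None:
    # receive the root object of the version that triggered the run
    root = ctx.receive_version()
    if getattr(root, "elements", None):
        ctx.mark_run_success("Version contains elements.")
    else:
        ctx.attach_error_to_objects("empty-model", root.id, "No elements found.")
        ctx.mark_run_failed("Version is empty.")

# ctx = AutomationContext.initialize(run_data_json, speckle_token)  # values supplied by the runtime
# check_version(ctx)
# ctx.report_run_status()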

View file

@ -0,0 +1,55 @@
"""Some useful helpers for working with automation data."""
import secrets
import string
from gql import gql
from specklepy.api.client import SpeckleClient
def register_new_automation(
speckle_client: SpeckleClient,
project_id: str,
model_id: str,
automation_id: str,
automation_name: str,
automation_revision_id: str,
) -> bool:
"""Register a new automation in the speckle server."""
query = gql(
"""
mutation CreateAutomation(
$projectId: String!
$modelId: String!
$automationName: String!
$automationId: String!
$automationRevisionId: String!
) {
automationMutations {
create(
input: {
projectId: $projectId
modelId: $modelId
automationName: $automationName
automationId: $automationId
automationRevisionId: $automationRevisionId
}
)
}
}
"""
)
params = {
"projectId": project_id,
"modelId": model_id,
"automationName": automation_name,
"automationId": automation_id,
"automationRevisionId": automation_revision_id,
}
return speckle_client.httpclient.execute(query, params)
def crypto_random_string(length: int) -> str:
"""Generate a semi crypto random string of a given length."""
alphabet = string.ascii_letters + string.digits
return "".join(secrets.choice(alphabet) for _ in range(length)).lower()

View file

@ -0,0 +1,191 @@
"""Function execution module.
Provides mechanisms to execute any function
that conforms to the AutomateFunction "interface".
"""
import json
import sys
import traceback
from pathlib import Path
from typing import Callable, Optional, Tuple, TypeVar, Union, overload
from pydantic import create_model
from pydantic.json_schema import GenerateJsonSchema
from speckle_automate.automation_context import AutomationContext
from speckle_automate.schema import AutomateBase, AutomationRunData, AutomationStatus
T = TypeVar("T", bound=AutomateBase)
AutomateFunction = Callable[[AutomationContext, T], None]
AutomateFunctionWithoutInputs = Callable[[AutomationContext], None]
def _read_input_data(inputs_location: str) -> str:
input_path = Path(inputs_location)
if not input_path.exists():
raise ValueError(f"Cannot find the function inputs file at {input_path}")
return input_path.read_text()
def _parse_input_data(
input_location: str, input_schema: Optional[type[T]]
) -> Tuple[AutomationRunData, Optional[T], str]:
input_json_string = _read_input_data(input_location)
class FunctionRunData(AutomateBase):
speckle_token: str
automation_run_data: AutomationRunData
function_inputs: None = None
parser_model = FunctionRunData
if input_schema:
parser_model = create_model(
"FunctionRunDataWithInputs",
function_inputs=(input_schema, ...),
__base__=FunctionRunData,
)
input_data = parser_model.model_validate_json(input_json_string)
return (
input_data.automation_run_data,
input_data.function_inputs,
input_data.speckle_token,
)
@overload
def execute_automate_function(
automate_function: AutomateFunction[T],
input_schema: type[T],
) -> None:
...
@overload
def execute_automate_function(automate_function: AutomateFunctionWithoutInputs) -> None:
...
class AutomateGenerateJsonSchema(GenerateJsonSchema):
def generate(self, schema, mode="validation"):
json_schema = super().generate(schema, mode=mode)
json_schema["$schema"] = self.schema_dialect
return json_schema
def execute_automate_function(
automate_function: Union[AutomateFunction[T], AutomateFunctionWithoutInputs],
input_schema: Optional[type[T]] = None,
):
"""Runs the provided automate function with the input schema."""
# first arg is the python file name, we do not need that
args = sys.argv[1:]
if len(args) != 2:
raise ValueError("Incorrect number of arguments specified need 2")
# we rely on a command name convention to decide what to do.
# this is here so that the function authors do not see any of this
command, argument = args
if command == "generate_schema":
path = Path(argument)
schema = json.dumps(
input_schema.model_json_schema(
by_alias=True, schema_generator=AutomateGenerateJsonSchema
)
if input_schema
else {}
)
path.write_text(schema)
elif command == "run":
automation_run_data, function_inputs, speckle_token = _parse_input_data(
argument, input_schema
)
automation_context = AutomationContext.initialize(
automation_run_data, speckle_token
)
if function_inputs:
automation_context = run_function(
automation_context,
automate_function, # type: ignore
function_inputs, # type: ignore
)
else:
automation_context = AutomationContext.initialize(
automation_run_data, speckle_token
)
automation_context = run_function(
automation_context,
automate_function, # type: ignore
)
exit_code = (
0 if automation_context.run_status == AutomationStatus.SUCCEEDED else 1
)
exit(exit_code)
else:
raise NotImplementedError(f"Command: '{command}' is not supported.")
@overload
def run_function(
automation_context: AutomationContext,
automate_function: AutomateFunction[T],
inputs: T,
) -> AutomationContext:
...
@overload
def run_function(
automation_context: AutomationContext,
automate_function: AutomateFunctionWithoutInputs,
) -> AutomationContext:
...
def run_function(
automation_context: AutomationContext,
automate_function: Union[AutomateFunction[T], AutomateFunctionWithoutInputs],
inputs: Optional[T] = None,
) -> AutomationContext:
"""Run the provided function with the automate sdk context."""
automation_context.report_run_status()
try:
# avoiding complex type gymnastics here on the internals.
# the external type overloads make this correct
if inputs:
automate_function(automation_context, inputs) # type: ignore
else:
automate_function(automation_context) # type: ignore
# the function author did not set a run status; assume success
if automation_context.run_status not in [
AutomationStatus.FAILED,
AutomationStatus.SUCCEEDED,
]:
automation_context.mark_run_success(
"WARNING: Automate assumed a success status,"
" but it was not marked as so by the function."
)
except Exception:
trace = traceback.format_exc()
print(trace)
automation_context.mark_run_failed(
"Function error. Check the automation run logs for details."
)
finally:
if not automation_context.context_view:
automation_context.set_context_view()
automation_context.report_run_status()
return automation_context
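A sketch of the function entry point this runner expects, assuming the conventions above (a "run" or "generate_schema" command plus one argument); the input model and field names are examples only:

from pydantic import Field

from speckle_automate import AutomateBase, AutomationContext, execute_automate_function

class FunctionInputs(AutomateBase):
    forbidden_speckle_type: str = Field(title="Forbidden speckle type")

def automate_function(ctx: AutomationContext, inputs: FunctionInputs) -> None:
    # a real function would inspect ctx.receive_version() here
    ctx.mark_run_success(f"Checked for {inputs.forbidden_speckle_type}.")

if __name__ == "__main__":
    # invoked by the runtime as: python main.py run <inputs.json>
    execute_automate_function(automate_function, FunctionInputs)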

View file

@ -0,0 +1,74 @@
""""""
from enum import Enum
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, ConfigDict, Field
from stringcase import camelcase
class AutomateBase(BaseModel):
"""Use this class as a base model for automate related DTO."""
model_config = ConfigDict(alias_generator=camelcase, populate_by_name=True)
class AutomationRunData(BaseModel):
"""Values of the project / model that triggered the run of this function."""
project_id: str
model_id: str
branch_name: str
version_id: str
speckle_server_url: str
automation_id: str
automation_revision_id: str
automation_run_id: str
function_id: str
function_name: str
function_logo: Optional[str]
model_config = ConfigDict(
alias_generator=camelcase, populate_by_name=True, protected_namespaces=()
)
class AutomationStatus(str, Enum):
"""Set the status of the automation."""
INITIALIZING = "INITIALIZING"
RUNNING = "RUNNING"
FAILED = "FAILED"
SUCCEEDED = "SUCCEEDED"
class ObjectResultLevel(str, Enum):
"""Possible status message levels for object reports."""
INFO = "INFO"
WARNING = "WARNING"
ERROR = "ERROR"
class ResultCase(AutomateBase):
"""A result case."""
category: str
level: ObjectResultLevel
object_ids: List[str]
message: Optional[str]
metadata: Optional[Dict[str, Any]]
visual_overrides: Optional[Dict[str, Any]]
class AutomationResult(AutomateBase):
"""Schema accepted by the Speckle server as a result for an automation run."""
elapsed: float = 0
result_view: Optional[str] = None
result_versions: List[str] = Field(default_factory=list)
blobs: List[str] = Field(default_factory=list)
run_status: AutomationStatus = AutomationStatus.RUNNING
status_message: Optional[str] = None
object_results: list[ResultCase] = Field(default_factory=list)
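Because AutomateBase sets alias_generator=camelcase, results serialize with camelCase keys; a quick illustrative check with made-up values:

case = ResultCase(
    category="clash",
    level=ObjectResultLevel.WARNING,
    object_ids=["abc123"],
    message=None,
    metadata=None,
    visual_overrides=None,
)
# keys come out as objectIds, visualOverrides, ... thanks to the alias generator
print(case.model_dump(by_alias=True))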

View file

@ -1,17 +1,7 @@
import re
from typing import Dict
from warnings import warn
from deprecated import deprecated
from gql import Client
from gql.transport.exceptions import TransportServerError
from gql.transport.requests import RequestsHTTPTransport
from gql.transport.websockets import WebsocketsTransport
from specklepy.api import resources
from specklepy.api.credentials import Account, get_account_from_token
from specklepy.api.credentials import Account
from specklepy.api.resources import (
user,
active_user,
branch,
commit,
@ -20,11 +10,10 @@ from specklepy.api.resources import (
server,
stream,
subscriptions,
user,
)
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException, SpeckleWarning
from specklepy.core.api.client import SpeckleClient as CoreSpeckleClient
from specklepy.logging import metrics
class SpeckleClient(CoreSpeckleClient):
@ -61,10 +50,16 @@ class SpeckleClient(CoreSpeckleClient):
DEFAULT_HOST = "speckle.xyz"
USE_SSL = True
def __init__(self, host: str = DEFAULT_HOST, use_ssl: bool = USE_SSL) -> None:
def __init__(
self,
host: str = DEFAULT_HOST,
use_ssl: bool = USE_SSL,
verify_certificate: bool = True,
) -> None:
super().__init__(
host=host,
use_ssl=use_ssl,
verify_certificate=verify_certificate,
)
self.account = Account()
@ -131,7 +126,9 @@ class SpeckleClient(CoreSpeckleClient):
Arguments:
token {str} -- an api token
"""
metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate_deprecated"})
metrics.track(
metrics.SDK, self.account, {"name": "Client Authenticate_deprecated"}
)
return super().authenticate(token)
def authenticate_with_token(self, token: str) -> None:
@ -143,7 +140,9 @@ class SpeckleClient(CoreSpeckleClient):
Arguments:
token {str} -- an api token
"""
metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate With Token"})
metrics.track(
metrics.SDK, self.account, {"name": "Client Authenticate With Token"}
)
return super().authenticate_with_token(token)
def authenticate_with_account(self, account: Account) -> None:
@ -155,5 +154,7 @@ class SpeckleClient(CoreSpeckleClient):
account {Account} -- the account object which can be found with
`get_default_account` or `get_local_accounts`
"""
metrics.track(metrics.SDK, self.account, {"name": "Client Authenticate With Account"})
metrics.track(
metrics.SDK, self.account, {"name": "Client Authenticate With Account"}
)
return super().authenticate_with_account(account)
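The new verify_certificate flag is what allows connecting to servers behind self-signed certificates; a minimal sketch (the host name is an example):

from specklepy.api.client import SpeckleClient

client = SpeckleClient(host="speckle.example.com", use_ssl=True, verify_certificate=False)
# client.authenticate_with_token("<token>")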

View file

@ -1,20 +1,14 @@
import os
from typing import List, Optional
from pydantic import BaseModel, Field # pylint: disable=no-name-in-module
from specklepy.api.models import ServerInfo
from specklepy.core.helpers import speckle_path_provider
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.transports.sqlite import SQLiteTransport
# following imports seem to be unnecessary, but they need to stay
# to not break the scripts using these functions as non-core
from specklepy.core.api.credentials import (Account, UserInfo,
StreamWrapper, # deprecated
get_local_accounts as core_get_local_accounts,
get_account_from_token as core_get_account_from_token)
from specklepy.core.api.credentials import StreamWrapper # noqa: F401
from specklepy.core.api.credentials import Account, UserInfo # noqa: F401
from specklepy.core.api.credentials import (
get_account_from_token as core_get_account_from_token,
)
from specklepy.core.api.credentials import get_local_accounts as core_get_local_accounts
from specklepy.logging import metrics
def get_local_accounts(base_path: Optional[str] = None) -> List[Account]:
@ -35,11 +29,12 @@ def get_local_accounts(base_path: Optional[str] = None) -> List[Account]:
(acc for acc in accounts if acc.isDefault),
accounts[0] if accounts else None,
),
{"name": "Get Local Accounts"}
{"name": "Get Local Accounts"},
)
return accounts
def get_default_account(base_path: Optional[str] = None) -> Optional[Account]:
"""
Gets this environment's default account if any. If there is no default,
@ -62,6 +57,7 @@ def get_default_account(base_path: Optional[str] = None) -> Optional[Account]:
return default
def get_account_from_token(token: str, server_url: str = None) -> Account:
"""Gets the local account for the token if it exists
Arguments:
@ -73,5 +69,5 @@ def get_account_from_token(token: str, server_url: str = None) -> Account:
"""
account = core_get_account_from_token(token, server_url)
metrics.track( metrics.SDK, account, {"name": "Get Account From Token"} )
metrics.track(metrics.SDK, account, {"name": "Get Account From Token"})
return account

View file

@ -1,18 +1,74 @@
from dataclasses import dataclass
from enum import Enum
from unicodedata import name
# following imports seem to be unnecessary, but they need to stay
# to not break the scripts using these functions as non-core
from specklepy.core.api.host_applications import (HostApplication, HostAppVersion,
get_host_app_from_string,
from specklepy.core.api.host_applications import (
ARCGIS,
ARCHICAD,
AUTOCAD,
BLENDER,
CIVIL,
CSIBRIDGE,
DXF,
DYNAMO,
ETABS,
EXCEL,
GRASSHOPPER,
GSA,
MICROSTATION,
NET,
OPENBUILDINGS,
OPENRAIL,
OPENROADS,
OTHER,
POWERBI,
PYTHON,
QGIS,
REVIT,
RHINO,
SAFE,
SAP2000,
SKETCHUP,
TEKLASTRUCTURES,
TOPSOLID,
UNITY,
UNREAL,
HostApplication,
HostAppVersion,
_app_name_host_app_mapping,
RHINO,GRASSHOPPER,REVIT,DYNAMO,UNITY,GSA,
CIVIL,AUTOCAD,MICROSTATION,OPENROADS,
OPENRAIL,OPENBUILDINGS,ETABS,SAP2000,CSIBRIDGE,
SAFE,TEKLASTRUCTURES,DXF,EXCEL,UNREAL,POWERBI,
BLENDER,QGIS,ARCGIS,SKETCHUP,ARCHICAD,TOPSOLID,
PYTHON,NET,OTHER)
get_host_app_from_string,
)
if __name__ == "__main__":
print(HostAppVersion.v)
# re-exporting stuff from the moved api module
__all__ = [
"ARCGIS",
"ARCHICAD",
"AUTOCAD",
"BLENDER",
"CIVIL",
"CSIBRIDGE",
"DXF",
"DYNAMO",
"ETABS",
"EXCEL",
"GRASSHOPPER",
"GSA",
"MICROSTATION",
"NET",
"OPENBUILDINGS",
"OPENRAIL",
"OPENROADS",
"OTHER",
"POWERBI",
"PYTHON",
"QGIS",
"REVIT",
"RHINO",
"SAFE",
"SAP2000",
"SKETCHUP",
"TEKLASTRUCTURES",
"TOPSOLID",
"UNITY",
"UNREAL",
"HostApplication",
"HostAppVersion",
"_app_name_host_app_mapping",
"get_host_app_from_string",
]

View file

@ -1,12 +1,35 @@
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel, Field
# following imports seem to be unnecessary, but they need to stay
# to not break the scripts using these functions as non-core
from specklepy.core.api.models import (Collaborator, Commit,
Commits, Object, Branch, Branches,
Stream, Streams, User, LimitedUser,
PendingStreamCollaborator, Activity,
ActivityCollection, ServerInfo)
from specklepy.core.api.models import (
Activity,
ActivityCollection,
Branch,
Branches,
Collaborator,
Commit,
Commits,
LimitedUser,
Object,
PendingStreamCollaborator,
ServerInfo,
Stream,
Streams,
User,
)
__all__ = [
"Activity",
"ActivityCollection",
"Branch",
"Branches",
"Collaborator",
"Commit",
"Commits",
"LimitedUser",
"Object",
"PendingStreamCollaborator",
"ServerInfo",
"Stream",
"Streams",
"User",
]

View file

@ -1,16 +1,12 @@
from typing import List, Optional
from specklepy.core.api.operations import deserialize as core_deserialize
from specklepy.core.api.operations import receive as _untracked_receive
from specklepy.core.api.operations import send as core_send
from specklepy.core.api.operations import serialize as core_serialize
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.objects.base import Base
from specklepy.serialization.base_object_serializer import BaseObjectSerializer
from specklepy.transports.abstract_transport import AbstractTransport
from specklepy.transports.sqlite import SQLiteTransport
from specklepy.core.api.operations import (send as core_send,
receive as _untracked_receive,
serialize as core_serialize,
deserialize as core_deserialize)
def send(
@ -74,6 +70,7 @@ def serialize(base: Base, write_transports: List[AbstractTransport] = []) -> str
metrics.track(metrics.SDK, custom_props={"name": "Serialize"})
return core_serialize(base, write_transports)
def deserialize(
obj_string: str, read_transport: Optional[AbstractTransport] = None
) -> Base:

View file

@ -1,21 +1,8 @@
from threading import Lock
from typing import Any, Dict, List, Optional, Tuple, Type, Union
from typing import Any, Optional, Tuple
from gql.client import Client
from gql.transport.exceptions import TransportQueryError
from graphql import DocumentNode
from specklepy.api.credentials import Account
from specklepy.logging.exceptions import (
GraphQLException,
SpeckleException,
UnsupportedException,
)
from specklepy.serialization.base_object_serializer import BaseObjectSerializer
from specklepy.transports.sqlite import SQLiteTransport
# following imports seem to be unnecessary, but they need to stay
# to not break the scripts using these functions as non-core
from specklepy.core.api.resource import ResourceBase as CoreResourceBase
@ -29,10 +16,9 @@ class ResourceBase(CoreResourceBase):
server_version: Optional[Tuple[Any, ...]] = None,
) -> None:
super().__init__(
account = account,
basepath = basepath,
client = client,
name = name,
server_version = server_version
account=account,
basepath=basepath,
client=client,
name=name,
server_version=server_version,
)

View file

@ -1,14 +1,9 @@
from datetime import datetime, timezone
from datetime import datetime
from typing import List, Optional
from gql import gql
from specklepy.api.models import ActivityCollection, PendingStreamCollaborator, User
from specklepy.api.resource import ResourceBase
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.api.models import PendingStreamCollaborator, User
from specklepy.core.api.resources.active_user import Resource as CoreResource
from specklepy.logging import metrics
class Resource(CoreResource):
@ -97,7 +92,9 @@ class Resource(CoreResource):
List[PendingStreamCollaborator]
-- a list of pending invites for the current user
"""
metrics.track(metrics.SDK, self.account, {"name": "User Active Invites All Get"})
metrics.track(
metrics.SDK, self.account, {"name": "User Active Invites All Get"}
)
return super().get_all_pending_invites()
def get_pending_invite(

View file

@ -1,12 +1,8 @@
from typing import Optional
from gql import gql
from specklepy.api.models import Branch
from specklepy.api.resource import ResourceBase
from specklepy.logging import metrics
from specklepy.core.api.resources.branch import Resource as CoreResource
from specklepy.logging import metrics
class Resource(CoreResource):

View file

@ -1,12 +1,9 @@
from typing import List, Optional
from gql import gql
from typing import List, Optional, Union
from specklepy.api.models import Commit
from specklepy.api.resource import ResourceBase
from specklepy.logging import metrics
from specklepy.core.api.resources.commit import Resource as CoreResource
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
class Resource(CoreResource):
@ -55,8 +52,8 @@ class Resource(CoreResource):
branch_name: str = "main",
message: str = "",
source_application: str = "python",
parents: List[str] = None,
) -> str:
parents: Optional[List[str]] = None,
) -> Union[str, SpeckleException]:
"""
Creates a commit on a branch
@ -76,7 +73,9 @@ class Resource(CoreResource):
str -- the id of the created commit
"""
metrics.track(metrics.SDK, self.account, {"name": "Commit Create"})
return super().create(stream_id, object_id, branch_name, message, source_application, parents)
return super().create(
stream_id, object_id, branch_name, message, source_application, parents
)
def update(self, stream_id: str, commit_id: str, message: str) -> bool:
"""

View file

@ -1,13 +1,8 @@
from typing import Dict, List
from gql import gql
from specklepy.api.resource import ResourceBase
from specklepy.objects.base import Base
from specklepy.logging import metrics
from specklepy.core.api.resources.object import Resource as CoreResource
from specklepy.logging import metrics
from specklepy.objects.base import Base
class Resource(CoreResource):
@ -58,4 +53,3 @@ class Resource(CoreResource):
"""
metrics.track(metrics.SDK, self.account, {"name": "Object Create"})
return super().create(stream_id, objects)

View file

@ -1,15 +1,11 @@
from datetime import datetime, timezone
from datetime import datetime
from typing import List, Optional, Union
from gql import gql
from specklepy.api.models import ActivityCollection, LimitedUser
from specklepy.api.resource import ResourceBase
from specklepy.core.api.resources.other_user import Resource as CoreResource
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.core.api.resources.other_user import Resource as CoreResource
class Resource(CoreResource):
"""API Access class for other users, that are not the currently active user."""
@ -84,4 +80,3 @@ class Resource(CoreResource):
"""
metrics.track(metrics.SDK, self.account, {"name": "Other User Activity"})
return super().activity(user_id, limit, action_type, before, after, cursor)

View file

@ -1,14 +1,8 @@
import re
from typing import Any, Dict, List, Tuple
from gql import gql
from specklepy.api.models import ServerInfo
from specklepy.api.resource import ResourceBase
from specklepy.logging import metrics
from specklepy.logging.exceptions import GraphQLException
from specklepy.core.api.resources.server import Resource as CoreResource
from specklepy.logging import metrics
class Resource(CoreResource):

View file

@ -1,15 +1,9 @@
from datetime import datetime, timezone
from datetime import datetime
from typing import List, Optional
from deprecated import deprecated
from gql import gql
from specklepy.api.models import ActivityCollection, PendingStreamCollaborator, Stream
from specklepy.api.resource import ResourceBase
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException, UnsupportedException
from specklepy.api.models import PendingStreamCollaborator, Stream
from specklepy.core.api.resources.stream import Resource as CoreResource
from specklepy.logging import metrics
class Resource(CoreResource):
@ -256,7 +250,11 @@ class Resource(CoreResource):
Returns:
bool -- True if the operation was successful
"""
metrics.track(metrics.SDK, self.account, {"name": "Stream Permission Update", "role": role})
metrics.track(
metrics.SDK,
self.account,
{"name": "Stream Permission Update", "role": role},
)
return super().update_permission(stream_id, user_id, role)
def revoke_permission(self, stream_id: str, user_id: str):
@ -301,4 +299,3 @@ class Resource(CoreResource):
"""
metrics.track(metrics.SDK, self.account, {"name": "Stream Activity"})
return super().activity(stream_id, action_type, limit, before, after, cursor)

View file

@ -1,15 +1,12 @@
from functools import wraps
from typing import Callable, Dict, List, Optional, Union
from gql import gql
from graphql import DocumentNode
from specklepy.api.resource import ResourceBase
from specklepy.api.resources.stream import Stream
from specklepy.core.api.resources.subscriptions import Resource as CoreResource
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.logging import metrics
from specklepy.core.api.resources.subscriptions import Resource as CoreResource
def check_wsclient(function):
@wraps(function)
@ -64,7 +61,9 @@ class Resource(CoreResource):
Returns:
Stream -- the updated stream
"""
metrics.track(metrics.SDK, self.account, {"name": "Subscription Stream Updated"})
metrics.track(
metrics.SDK, self.account, {"name": "Subscription Stream Updated"}
)
return super().stream_updated(id, callback)
@check_wsclient
@ -83,7 +82,9 @@ class Resource(CoreResource):
Returns:
dict -- dict containing 'id' of stream removed and optionally 'revokedBy'
"""
metrics.track(metrics.SDK, self.account, {"name": "Subscription Stream Removed"})
metrics.track(
metrics.SDK, self.account, {"name": "Subscription Stream Removed"}
)
return super().stream_removed(callback)
@check_wsclient

View file

@ -1,16 +1,12 @@
from datetime import datetime, timezone
from datetime import datetime
from typing import List, Optional, Union
from deprecated import deprecated
from gql import gql
from specklepy.api.models import ActivityCollection, PendingStreamCollaborator, User
from specklepy.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException
from specklepy.logging import metrics
from specklepy.api.models import PendingStreamCollaborator, User
from specklepy.core.api.resources.user import Resource as CoreResource
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
DEPRECATION_VERSION = "2.9.0"
DEPRECATION_TEXT = (
@ -83,7 +79,7 @@ class Resource(CoreResource):
Returns:
bool -- True if your profile was updated successfully
"""
#metrics.track(metrics.USER, self.account, {"name": "update"})
# metrics.track(metrics.USER, self.account, {"name": "update"})
metrics.track(metrics.SDK, self.account, {"name": "User Update_deprecated"})
return super().update(name, company, bio, avatar)
@ -119,7 +115,6 @@ class Resource(CoreResource):
metrics.track(metrics.SDK, self.account, {"name": "User Activity_deprecated"})
return super().activity(user_id, limit, action_type, before, after, cursor)
@deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT)
def get_all_pending_invites(self) -> List[PendingStreamCollaborator]:
"""Get all of the active user's pending stream invites
@ -130,11 +125,12 @@ class Resource(CoreResource):
List[PendingStreamCollaborator]
-- a list of pending invites for the current user
"""
#metrics.track(metrics.INVITE, self.account, {"name": "get"})
metrics.track(metrics.SDK, self.account, {"name": "User GetAllInvites_deprecated"})
# metrics.track(metrics.INVITE, self.account, {"name": "get"})
metrics.track(
metrics.SDK, self.account, {"name": "User GetAllInvites_deprecated"}
)
return super().get_all_pending_invites()
@deprecated(version=DEPRECATION_VERSION, reason=DEPRECATION_TEXT)
def get_pending_invite(
self, stream_id: str, token: Optional[str] = None
@ -152,7 +148,6 @@ class Resource(CoreResource):
PendingStreamCollaborator
-- the invite for the given stream (or None if it isn't found)
"""
#metrics.track(metrics.INVITE, self.account, {"name": "get"})
# metrics.track(metrics.INVITE, self.account, {"name": "get"})
metrics.track(metrics.SDK, self.account, {"name": "User GetInvite_deprecated"})
return super().get_pending_invite(stream_id, token)

View file

@ -1,17 +1,9 @@
from urllib.parse import unquote, urlparse
from warnings import warn
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import (
Account,
get_account_from_token,
get_local_accounts,
)
from specklepy.logging.exceptions import SpeckleException, SpeckleWarning
from specklepy.api.credentials import Account
from specklepy.core.api.wrapper import StreamWrapper as CoreStreamWrapper
from specklepy.logging import metrics
from specklepy.transports.server.server import ServerTransport
from specklepy.logging import metrics
from specklepy.core.api.wrapper import StreamWrapper as CoreStreamWrapper
class StreamWrapper(CoreStreamWrapper):
"""
@ -51,7 +43,7 @@ class StreamWrapper(CoreStreamWrapper):
_account: Account = None
def __init__(self, url: str) -> None:
super().__init__(url = url)
super().__init__(url=url)
def get_account(self, token: str = None) -> Account:
"""
@ -90,5 +82,7 @@ class StreamWrapper(CoreStreamWrapper):
ServerTransport -- constructed for this stream
with a pre-authenticated client
"""
metrics.track(metrics.SDK, custom_props={"name": "Stream Wrapper Get Transport"})
metrics.track(
metrics.SDK, custom_props={"name": "Stream Wrapper Get Transport"}
)
return super().get_transport(token)

View file

@ -11,7 +11,6 @@ from gql.transport.websockets import WebsocketsTransport
from specklepy.core.api import resources
from specklepy.core.api.credentials import Account, get_account_from_token
from specklepy.core.api.resources import (
user,
active_user,
branch,
commit,
@ -20,6 +19,7 @@ from specklepy.core.api.resources import (
server,
stream,
subscriptions,
user,
)
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException, SpeckleWarning
@ -59,7 +59,12 @@ class SpeckleClient:
DEFAULT_HOST = "speckle.xyz"
USE_SSL = True
def __init__(self, host: str = DEFAULT_HOST, use_ssl: bool = USE_SSL) -> None:
def __init__(
self,
host: str = DEFAULT_HOST,
use_ssl: bool = USE_SSL,
verify_certificate: bool = True,
) -> None:
ws_protocol = "ws"
http_protocol = "http"
@ -74,9 +79,12 @@ class SpeckleClient:
self.graphql = f"{self.url}/graphql"
self.ws_url = f"{ws_protocol}://{host}/graphql"
self.account = Account()
self.verify_certificate = verify_certificate
self.httpclient = Client(
transport=RequestsHTTPTransport(url=self.graphql, verify=True, retries=3)
transport=RequestsHTTPTransport(
url=self.graphql, verify=self.verify_certificate, retries=3
)
)
self.wsclient = None
@ -150,7 +158,7 @@ class SpeckleClient:
"apollographql-client-version": metrics.HOST_APP_VERSION,
}
httptransport = RequestsHTTPTransport(
url=self.graphql, headers=headers, verify=True, retries=3
url=self.graphql, headers=headers, verify=self.verify_certificate, retries=3
)
wstransport = WebsocketsTransport(
url=self.ws_url,

View file

@ -6,7 +6,6 @@ from pydantic import BaseModel, Field # pylint: disable=no-name-in-module
from specklepy.core.api.models import ServerInfo
from specklepy.core.helpers import speckle_path_provider
from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.transports.sqlite import SQLiteTransport
@ -110,7 +109,7 @@ def get_default_account(base_path: Optional[str] = None) -> Optional[Account]:
if not default:
default = accounts[0]
default.isDefault = True
#metrics.initialise_tracker(default)
# metrics.initialise_tracker(default)
return default

View file

@ -196,3 +196,4 @@ class ServerInfo(BaseModel):
scopes: Optional[List[dict]] = None
authStrategies: Optional[List[dict]] = None
version: Optional[str] = None
frontend2: Optional[bool] = None
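With the new field, callers can check whether a server runs the new frontend after fetching server info; a hedged sketch reusing the client constructed in the earlier example:

info = client.server.get()
if getattr(info, "frontend2", None):
    print("Server is running frontend 2")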

View file

@ -1,6 +1,6 @@
from typing import List, Optional
#from specklepy.logging import metrics
# from specklepy.logging import metrics
from specklepy.logging.exceptions import SpeckleException
from specklepy.objects.base import Base
from specklepy.serialization.base_object_serializer import BaseObjectSerializer

View file

@ -3,7 +3,11 @@ from typing import List, Optional
from gql import gql
from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, User
from specklepy.core.api.models import (
ActivityCollection,
PendingStreamCollaborator,
User,
)
from specklepy.core.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException

View file

@ -4,6 +4,7 @@ from gql import gql
from specklepy.core.api.models import Branch
from specklepy.core.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException
NAME = "branch"
@ -39,6 +40,10 @@ class Resource(ResourceBase):
}
"""
)
if len(name) < 3:
return SpeckleException(
message="Branch Name must be at least 3 characters"
)
params = {
"branch": {
"streamId": stream_id,

View file

@ -1,9 +1,10 @@
from typing import List, Optional
from typing import List, Optional, Union
from gql import gql
from specklepy.core.api.models import Commit
from specklepy.core.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException
NAME = "commit"
@ -106,8 +107,8 @@ class Resource(ResourceBase):
branch_name: str = "main",
message: str = "",
source_application: str = "python",
parents: List[str] = None,
) -> str:
parents: Optional[List[str]] = None,
) -> Union[str, SpeckleException]:
"""
Creates a commit on a branch

View file

@ -1,6 +1,7 @@
import re
from typing import Any, Dict, List, Tuple
import requests
from gql import gql
from specklepy.core.api.models import ServerInfo
@ -56,9 +57,21 @@ class Resource(ResourceBase):
"""
)
return self.make_request(
server_info = self.make_request(
query=query, return_type="serverInfo", schema=ServerInfo
)
if isinstance(server_info, ServerInfo) and isinstance(
server_info.canonicalUrl, str
):
r = requests.get(
server_info.canonicalUrl, headers={"User-Agent": "specklepy SDK"}
)
if "x-speckle-frontend-2" in r.headers:
server_info.frontend2 = True
else:
server_info.frontend2 = False
return server_info
def version(self) -> Tuple[Any, ...]:
"""Get the server version

View file

@ -1,10 +1,13 @@
from datetime import datetime, timezone
from typing import List, Optional
from deprecated import deprecated
from gql import gql
from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, Stream
from specklepy.core.api.models import (
ActivityCollection,
PendingStreamCollaborator,
Stream,
)
from specklepy.core.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException, UnsupportedException
@ -163,7 +166,10 @@ class Resource(ResourceBase):
}
"""
)
if len(name) < 3 and len(name) != 0:
return SpeckleException(
message="Stream Name must be at least 3 characters"
)
params = {
"stream": {"name": name, "description": description, "isPublic": is_public}
}
@ -508,7 +514,6 @@ class Resource(ResourceBase):
if user_id is not None
]
params = {"input": [*email_invites, *user_invites]}
return self.make_request(

View file

@ -4,9 +4,12 @@ from typing import List, Optional, Union
from deprecated import deprecated
from gql import gql
from specklepy.core.api.models import ActivityCollection, PendingStreamCollaborator, User
from specklepy.core.api.models import (
ActivityCollection,
PendingStreamCollaborator,
User,
)
from specklepy.core.api.resource import ResourceBase
from specklepy.logging.exceptions import SpeckleException
NAME = "user"

View file

@ -1,6 +1,8 @@
from urllib.parse import unquote, urlparse
from urllib.parse import quote, unquote, urlparse
from warnings import warn
from gql import gql
from specklepy.core.api.client import SpeckleClient
from specklepy.core.api.credentials import (
Account,
@ -45,6 +47,7 @@ class StreamWrapper:
commit_id: str = None
object_id: str = None
branch_name: str = None
model_id: str = None
_client: SpeckleClient = None
_account: Account = None
@ -81,8 +84,18 @@ class StreamWrapper:
" provided."
)
# check for fe2 URL
if "/projects/" in parsed.path:
use_fe2 = True
key_stream = "project"
else:
use_fe2 = False
key_stream = "stream"
while segments:
segment = segments.pop(0)
if use_fe2 is False:
if segments and segment.lower() == "streams":
self.stream_id = segments.pop(0)
elif segments and segment.lower() == "commits":
@ -100,10 +113,57 @@ class StreamWrapper:
f"Cannot parse {url} into a stream wrapper class - invalid URL"
" provided."
)
elif segments and use_fe2 is True:
if segment.lower() == "projects":
self.stream_id = segments.pop(0)
elif segment.lower() == "models":
next_segment = segments.pop(0)
if "," in next_segment:
raise SpeckleException("Multi-model urls are not supported yet")
elif unquote(next_segment).startswith("$"):
raise SpeckleException(
"Federation model urls are not supported"
)
elif len(next_segment) == 32:
self.object_id = next_segment
else:
self.branch_name = unquote(next_segment).split("@")[0]
if "@" in unquote(next_segment):
self.commit_id = unquote(next_segment).split("@")[1]
else:
raise SpeckleException(
f"Cannot parse {url} into a stream wrapper class - invalid URL"
" provided."
)
if use_fe2 is True and self.branch_name is not None:
self.model_id = self.branch_name
# get branch name
query = gql(
"""
query Project($project_id: String!, $model_id: String!) {
project(id: $project_id) {
id
model(id: $model_id) {
name
}
}
}
"""
)
self._client = self.get_client()
params = {"project_id": self.stream_id, "model_id": self.model_id}
project = self._client.httpclient.execute(query, params)
try:
self.branch_name = project["project"]["model"]["name"]
except KeyError as ke:
raise SpeckleException("Project model name is not found", ke)
if not self.stream_id:
raise SpeckleException(
f"Cannot parse {url} into a stream wrapper class - no stream id found."
f"Cannot parse {url} into a stream wrapper class - no {key_stream} id found."
)
@property
@ -184,3 +244,46 @@ class StreamWrapper:
if not self._account or not self._account.token:
self.get_account(token)
return ServerTransport(self.stream_id, account=self._account)
def to_string(self) -> str:
"""
Constructs a URL depending on the StreamWrapper type and FE version.
"""
use_fe2 = False
key_streams = "/streams/"
key_branches = "/branches/"
if isinstance(self.branch_name, str):
value_branch = quote(self.branch_name)
if self.branch_name == "globals":
key_branches = "/"
key_commits = "/commits/"
if isinstance(self.commit_id, str) and self.branch_name == "globals":
key_commits = "/globals/"
key_objects = "/objects/"
if "/projects/" in self.stream_url:
use_fe2 = True
key_streams = "/projects/"
key_branches = "/models/"
value_branch = self.model_id
key_commits = "@"
key_objects = "/models/"
wrapper_type = self.type
if use_fe2 is False or (use_fe2 is True and not self.model_id):
base_url = f"{self.server_url}{key_streams}{self.stream_id}"
else: # fe2 is True and model_id available
base_url = f"{self.server_url}{key_streams}{self.stream_id}{key_branches}{value_branch}"
if wrapper_type == "object":
return f"{base_url}{key_objects}{self.object_id}"
elif wrapper_type == "commit":
return f"{base_url}{key_commits}{self.commit_id}"
elif wrapper_type == "branch":
return f"{self.server_url}{key_streams}{self.stream_id}{key_branches}{value_branch}"
elif wrapper_type == "stream":
return f"{self.server_url}{key_streams}{self.stream_id}"
else:
raise SpeckleException(
f"Cannot parse StreamWrapper of type '{wrapper_type}'"
)
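The parsing changes mean FE2 project URLs can be wrapped directly; an illustrative sketch (the project id is made up, and model URLs additionally resolve the model name over GraphQL, so they need a local account or token):

from specklepy.api.wrapper import StreamWrapper

w = StreamWrapper("https://app.speckle.systems/projects/3073b96e86")
print(w.stream_id)  # "3073b96e86"
# .../projects/<id>/models/<model_id> URLs also set w.model_id and resolve w.branch_name
# print(w.to_string())  # rebuilds a URL matching the wrapper's type and FE version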

View file

@ -142,7 +142,7 @@ class MetricsTracker(metaclass=Singleton):
def hash(self, value: str):
inputList = value.lower().split("://")
input = inputList[len(inputList)-1].split("/")[0].split('?')[0]
input = inputList[len(inputList) - 1].split("/")[0].split("?")[0]
return hashlib.md5(input.encode("utf-8")).hexdigest().upper()
def _send_tracking_requests(self):

View file

@ -1,4 +1,5 @@
from typing import Optional
from specklepy.objects import Base
@ -12,5 +13,3 @@ class CRS(Base, speckle_type="Objects.GIS.CRS"):
offset_x: Optional[float] = None
offset_y: Optional[float] = None
rotation: Optional[float] = None

View file

@ -1,21 +1,14 @@
"""Builtin Speckle object kit."""
from specklepy.objects.GIS.layers import (
VectorLayer,
RasterLayer,
)
from specklepy.objects.GIS.CRS import CRS
from specklepy.objects.GIS.geometry import (
GisPolygonGeometry,
GisPolygonElement,
GisLineElement,
GisPointElement,
GisPolygonElement,
GisPolygonGeometry,
GisRasterElement,
)
from specklepy.objects.GIS.CRS import (
CRS,
)
from specklepy.objects.GIS.layers import RasterLayer, VectorLayer
__all__ = [
"VectorLayer",

View file

@ -1,14 +1,15 @@
from typing import Optional, Union, List
from specklepy.objects.geometry import (
Point,
Line,
Polyline,
Circle,
Arc,
Polycurve,
Mesh,
)
from typing import List, Optional, Union
from specklepy.objects import Base
from specklepy.objects.geometry import (
Arc,
Circle,
Line,
Mesh,
Point,
Polycurve,
Polyline,
)
class GisPolygonGeometry(

View file

@ -1,10 +1,11 @@
from typing import Any, Dict, List, Union, Optional
from specklepy.objects.base import Base
from specklepy.objects.other import Collection
from typing import Any, Dict, List, Optional, Union
from specklepy.objects.GIS.CRS import CRS
from deprecated import deprecated
from specklepy.objects.base import Base
from specklepy.objects.GIS.CRS import CRS
from specklepy.objects.other import Collection
@deprecated(version="2.15", reason="Use VectorLayer or RasterLayer instead")
class Layer(Base, detachable={"features"}):
@ -38,7 +39,6 @@ class VectorLayer(
speckle_type="VectorLayer",
serialize_ignore={"features"},
):
"""GIS Vector Layer"""
name: Optional[str] = None
@ -67,7 +67,6 @@ class RasterLayer(
speckle_type="RasterLayer",
serialize_ignore={"features"},
):
"""GIS Raster Layer"""
name: Optional[str] = None
@ -89,13 +88,12 @@ class RasterLayer(
self.elements = value
class VectorLayer(
class VectorLayer( # noqa: F811
Collection,
detachable={"elements"},
speckle_type="Objects.GIS.VectorLayer",
serialize_ignore={"features"},
):
"""GIS Vector Layer"""
name: Optional[str] = None
@ -117,13 +115,12 @@ class VectorLayer(
self.elements = value
class RasterLayer(
class RasterLayer( # noqa: F811
Collection,
detachable={"elements"},
speckle_type="Objects.GIS.RasterLayer",
serialize_ignore={"features"},
):
"""GIS Raster Layer"""
name: Optional[str] = None
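
For orientation: the duplicated VectorLayer/RasterLayer definitions are intentional, since the later `# noqa: F811` classes shadow the earlier backwards-compatibility ones so that new layers register under the namespaced Objects.GIS.* speckle types. A hedged construction sketch (only `name` appears in the hunk; the other fields are assumed to have defaults):

from specklepy.objects.GIS.layers import RasterLayer, VectorLayer

# Assumption: fields other than `name` default sensibly, as is usual for
# specklepy Base subclasses, so a bare construction is enough for a sketch.
vector = VectorLayer(name="Roads")
raster = RasterLayer(name="Elevation")

print(vector.speckle_type)  # expected: "Objects.GIS.VectorLayer"
print(raster.speckle_type)  # expected: "Objects.GIS.RasterLayer"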

Просмотреть файл

@ -188,7 +188,8 @@ class _RegisteringBase:
cls._detachable = cls._detachable.union(detachable)
if serialize_ignore:
cls._serialize_ignore = cls._serialize_ignore.union(serialize_ignore)
super().__init_subclass__(**kwargs)
# we know that the super here is object, which takes no args on __init_subclass__
return super().__init_subclass__()
# T = TypeVar("T")
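
The change above only makes explicit what was already true: the next class in the MRO is object, and object.__init_subclass__ accepts no keyword arguments, so the custom class kwargs must be consumed before delegating. A generic sketch of that pattern (not specklepy's actual registry):

class Registered:
    _registry = {}

    def __init_subclass__(cls, speckle_type="", detachable=frozenset(), **kwargs):
        # Consume the custom class kwargs here ...
        cls._speckle_type = speckle_type or cls.__name__
        cls._detachable = set(detachable)
        Registered._registry[cls._speckle_type] = cls
        # ... then delegate upwards without them: object.__init_subclass__
        # takes no arguments, which is what the comment above points out.
        return super().__init_subclass__()


class Wall(Registered, speckle_type="Objects.BuiltElements.Wall", detachable={"elements"}):
    pass


assert Registered._registry["Objects.BuiltElements.Wall"] is Wall
assert Wall._detachable == {"elements"}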

Просмотреть файл

@ -319,7 +319,7 @@ class Spiral(Base, speckle_type=GEOMETRY + "Spiral", detachable={"displayValue"}
startPoint: Optional[Point] = None
endPoint: Optional[Point]
plane: Optional[Plane]
turns: Optional[int]
turns: Optional[float]
pitchAxis: Optional[Vector] = Vector()
pitch: float = 0
spiralType: Optional[SpiralType] = None

Просмотреть файл

@ -1,17 +1,18 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Collection, Dict, Generic, Iterable, List, Optional, Tuple, TypeVar
from typing import Any, Collection, Dict, Generic, Iterable, Optional, Tuple, TypeVar
from attrs import define
from specklepy.objects.base import Base
ROOT: str = "__Root"
T = TypeVar('T')
T = TypeVar("T")
PARENT_INFO = Tuple[Optional[str], str]
@define(slots=True)
class CommitObjectBuilder(ABC, Generic[T]):
converted: Dict[str, Base]
_parent_infos: Dict[str, Collection[PARENT_INFO]]
@ -26,14 +27,17 @@ class CommitObjectBuilder(ABC, Generic[T]):
def build_commit_object(self, root_commit_object: Base) -> None:
self.apply_relationships(self.converted.values(), root_commit_object)
def set_relationship(self, app_id: Optional[str], *parent_info : PARENT_INFO) -> None:
def set_relationship(
self, app_id: Optional[str], *parent_info: PARENT_INFO
) -> None:
if not app_id:
return
self._parent_infos[app_id] = parent_info
def apply_relationships(self, to_add: Iterable[Base], root_commit_object: Base) -> None:
def apply_relationships(
self, to_add: Iterable[Base], root_commit_object: Base
) -> None:
for c in to_add:
try:
self.apply_relationship(c, root_commit_object)
@ -41,20 +45,25 @@ class CommitObjectBuilder(ABC, Generic[T]):
print(f"Failed to add object {type(c)} to commit object: {ex}")
def apply_relationship(self, current: Base, root_commit_object: Base):
if not current.applicationId: raise Exception(f"Expected applicationId to have been set")
if not current.applicationId:
raise Exception("Expected applicationId to have been set")
parents = self._parent_infos[current.applicationId]
for (parent_id, prop_name) in parents:
if not parent_id: continue
for parent_id, prop_name in parents:
if not parent_id:
continue
parent: Optional[Base]
if parent_id == ROOT:
parent = root_commit_object
else:
parent = self.converted[parent_id] if parent_id in self.converted else None
parent = (
self.converted[parent_id] if parent_id in self.converted else None
)
if not parent: continue
if not parent:
continue
try:
elements = get_detached_prop(parent, prop_name)
@ -66,18 +75,26 @@ class CommitObjectBuilder(ABC, Generic[T]):
return
except Exception as ex:
# A parent was found, but it was invalid (Likely because of a type mismatch on a `elements` property)
print(f"Failed to add object {type(current)} to a converted parent; {ex}")
print(
f"Failed to add object {type(current)} to a converted parent; {ex}"
)
raise Exception(f"Could not find a valid parent for object of type {type(current)}. Checked {len(parents)} potential parents, and none were converted!")
raise Exception(
f"Could not find a valid parent for object of type {type(current)}. Checked {len(parents)} potential parents, and none were converted!"
)
def get_detached_prop(speckle_object: Base, prop_name: str) -> Optional[Any]:
detached_prop_name = get_detached_prop_name(speckle_object, prop_name)
return getattr(speckle_object, detached_prop_name, None)
def set_detached_prop(speckle_object: Base, prop_name: str, value: Optional[Any]) -> None:
def set_detached_prop(
speckle_object: Base, prop_name: str, value: Optional[Any]
) -> None:
detached_prop_name = get_detached_prop_name(speckle_object, prop_name)
setattr(speckle_object, detached_prop_name, value)
def get_detached_prop_name(speckle_object: Base, prop_name: str) -> str:
return prop_name if hasattr(speckle_object, prop_name) else f"@{prop_name}"
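
The three module-level helpers at the end resolve between a plain property name and its detached, "@"-prefixed counterpart. A small standalone sketch of that fallback (the helper is repeated here so the snippet runs on its own):

from specklepy.objects.base import Base


def get_detached_prop_name(speckle_object: Base, prop_name: str) -> str:
    # Same helper as above, repeated so the sketch stands alone.
    return prop_name if hasattr(speckle_object, prop_name) else f"@{prop_name}"


detached = Base()
setattr(detached, "@elements", [])   # dynamic prop stored under its detached name
assert get_detached_prop_name(detached, "elements") == "@elements"

plain = Base()
plain.elements = []                  # same prop stored under the plain name
assert get_detached_prop_name(plain, "elements") == "elements"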

Просмотреть файл

@ -41,7 +41,6 @@ class TraversalContext:
@final
@define(slots=True, frozen=True)
class GraphTraversal:
_rules: List[ITraversalRule]
def traverse(self, root: Base) -> Iterator[TraversalContext]:
@ -58,15 +57,15 @@ class GraphTraversal:
members_to_traverse = active_rule.get_members_to_traverse(current)
for child_prop in members_to_traverse:
try:
if child_prop in {"speckle_type", "units", "applicationId"}: continue #debug: to avoid noisy exceptions, explicitly avoid checking ones we know will fail, this is not exhaustive
if child_prop in {"speckle_type", "units", "applicationId"}:
continue # debug: to avoid noisy exceptions, explicitly avoid checking ones we know will fail, this is not exhaustive
if getattr(current, child_prop, None):
value = current[child_prop]
self._traverse_member_to_stack(
stack, value, child_prop, head
)
except KeyError as ex:
self._traverse_member_to_stack(stack, value, child_prop, head)
except KeyError:
# Unset application ids, and class variables like SpeckleType will throw when __getitem__ is called
pass
@staticmethod
def _traverse_member_to_stack(
stack: List[TraversalContext],
@ -78,10 +77,14 @@ class GraphTraversal:
stack.append(TraversalContext(value, member_name, parent))
elif isinstance(value, list):
for obj in value:
GraphTraversal._traverse_member_to_stack(stack, obj, member_name, parent)
GraphTraversal._traverse_member_to_stack(
stack, obj, member_name, parent
)
elif isinstance(value, dict):
for obj in value.values():
GraphTraversal._traverse_member_to_stack(stack, obj, member_name, parent)
GraphTraversal._traverse_member_to_stack(
stack, obj, member_name, parent
)
@staticmethod
def traverse_member(value: Optional[Any]) -> Iterator[Base]:
@ -96,7 +99,6 @@ class GraphTraversal:
for o in GraphTraversal.traverse_member(obj):
yield o
def _get_active_rule_or_default_rule(self, o: Base) -> ITraversalRule:
return self._get_active_rule(o) or _default_rule
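
traverse_member flattens a member value into the Base instances it contains, recursing through lists and dict values, the same shapes _traverse_member_to_stack handles above. A hedged sketch of the expected behaviour (the module path is assumed):

from specklepy.objects.base import Base
# Module path assumed from the file shown above; adjust if it differs.
from specklepy.objects.graph_traversal.traversal import GraphTraversal

a, b = Base(), Base()

# Bases nested inside lists and dict values are yielded; scalars are skipped.
found = list(GraphTraversal.traverse_member([a, {"key": b}, 42, "text"]))
print(found)  # expected: [a, b], in traversal order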

Просмотреть файл

@ -1,7 +1,8 @@
from typing import Any, List, Optional
from deprecated import deprecated
from specklepy.objects.geometry import Point, Vector
from specklepy.objects.geometry import Plane, Point, Polyline, Vector
from .base import Base
@ -71,6 +72,19 @@ class DisplayStyle(Base, speckle_type=OTHER + "DisplayStyle"):
lineweight: float = 0
class Text(Base, speckle_type=OTHER + "Text"):
"""
Text object to render it on viewer.
"""
plane: Plane
value: str
height: float
rotation: float
displayValue: Optional[List[Polyline]] = None
richText: Optional[str] = None
class Transform(
Base,
speckle_type=OTHER + "Transform",
@ -247,9 +261,7 @@ class BlockDefinition(
geometry: Optional[List[Base]] = None
class Instance(
Base, speckle_type=OTHER + "Instance", detachable={"definition"}
):
class Instance(Base, speckle_type=OTHER + "Instance", detachable={"definition"}):
transform: Optional[Transform] = None
definition: Optional[Base] = None
@ -268,6 +280,7 @@ class BlockInstance(
def blockDefinition(self, value: Optional[BlockDefinition]) -> None:
self.definition = value
class RevitInstance(Instance, speckle_type=OTHER_REVIT + "RevitInstance"):
level: Optional[Base] = None
facingFlipped: bool
@ -275,10 +288,9 @@ class RevitInstance(Instance, speckle_type=OTHER_REVIT + "RevitInstance"):
parameters: Optional[Base] = None
elementId: Optional[str]
# TODO: prob move this into a built elements module, but just trialling this for now
class RevitParameter(
Base, speckle_type="Objects.BuiltElements.Revit.Parameter"
):
class RevitParameter(Base, speckle_type="Objects.BuiltElements.Revit.Parameter"):
name: Optional[str] = None
value: Any = None
applicationUnitType: Optional[str] = None # eg UnitType UT_Length
@ -290,6 +302,7 @@ class RevitParameter(
isReadOnly: bool = False
isTypeParameter: bool = False
class Collection(
Base, speckle_type="Speckle.Core.Models.Collection", detachable={"elements"}
):
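
The new Text class above declares plane, value, height, and rotation without defaults, so a construction sketch passes all four; the Plane defaults are assumed adequate for illustration:

from specklepy.objects.geometry import Plane
from specklepy.objects.other import Text

# Assumption: Plane() with its default origin and axes is fine for a sketch.
label = Text(
    plane=Plane(),
    value="Level 01",
    height=0.25,    # model units
    rotation=0.0,   # assumed radians
)
print(label.speckle_type)  # expected: "Objects.Other.Text"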

Просмотреть файл

@ -6,10 +6,7 @@ from specklepy.objects.structural.analysis import (
ModelSettings,
ModelUnits,
)
from specklepy.objects.structural.axis import (
AxisType,
Axis
)
from specklepy.objects.structural.axis import Axis, AxisType
from specklepy.objects.structural.geometry import (
Element1D,
Element2D,

Просмотреть файл

@ -1,5 +1,5 @@
from typing import Optional
from enum import Enum
from typing import Optional
from specklepy.objects.base import Base
from specklepy.objects.geometry import Plane

Просмотреть файл

@ -106,7 +106,9 @@ def get_encoding_from_units(unit: Union[Units, str, None]):
def get_scale_factor_from_string(fromUnits: str, toUnits: str) -> float:
"""Returns a scalar to convert distance values from one unit system to another"""
return get_scale_factor(get_units_from_string(fromUnits), get_units_from_string(toUnits))
return get_scale_factor(
get_units_from_string(fromUnits), get_units_from_string(toUnits)
)
def get_scale_factor(fromUnits: Units, toUnits: Units) -> float:
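
The reformatted get_scale_factor_from_string simply parses both unit strings and defers to get_scale_factor; a quick sketch of that round trip:

from specklepy.objects.units import (
    Units,
    get_scale_factor,
    get_scale_factor_from_string,
    get_units_from_string,
)

# 1 metre expressed in millimetres, via strings or via the enum directly.
assert abs(get_scale_factor_from_string("m", "mm") - 1000.0) < 1e-9
assert abs(get_scale_factor(Units.m, Units.mm) - 1000.0) < 1e-9
assert get_units_from_string("mm") is Units.mm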

Просмотреть файл

@ -1,4 +1,4 @@
from typing import Any, Dict, List
from typing import Dict, List
from specklepy.transports.abstract_transport import AbstractTransport

Просмотреть файл

@ -1,5 +1,5 @@
import json
from typing import Any, Dict, List, Optional
from typing import Dict, List, Optional
from warnings import warn
import requests

Просмотреть файл

@ -1,7 +1,7 @@
import os
import sqlite3
from contextlib import closing
from typing import Any, Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Tuple
from specklepy.core.helpers import speckle_path_provider
from specklepy.logging.exceptions import SpeckleException

Просмотреть файл

@ -0,0 +1,285 @@
"""Run integration tests with a speckle server."""
import os
from pathlib import Path
from typing import Dict
import pytest
from gql import gql
from speckle_automate import (
AutomationContext,
AutomationRunData,
AutomationStatus,
run_function,
)
from speckle_automate.helpers import crypto_random_string, register_new_automation
from speckle_automate.schema import AutomateBase
from specklepy.api import operations
from specklepy.api.client import SpeckleClient
from specklepy.objects.base import Base
from specklepy.transports.server import ServerTransport
@pytest.fixture
def speckle_token(user_dict: Dict[str, str]) -> str:
"""Provide a speckle token for the test suite."""
return user_dict["token"]
@pytest.fixture
def speckle_server_url(host: str) -> str:
"""Provide a speckle server url for the test suite, default to localhost."""
return f"http://{host}"
@pytest.fixture
def test_client(speckle_server_url: str, speckle_token: str) -> SpeckleClient:
"""Initialize a SpeckleClient for testing."""
test_client = SpeckleClient(speckle_server_url, use_ssl=False)
test_client.authenticate_with_token(speckle_token)
return test_client
@pytest.fixture
def test_object() -> Base:
"""Create a Base model for testing."""
root_object = Base()
root_object.foo = "bar"
return root_object
@pytest.fixture
def automation_run_data(
test_object: Base, test_client: SpeckleClient, speckle_server_url: str
) -> AutomationRunData:
"""Set up an automation context for testing."""
project_id = test_client.stream.create("Automate function e2e test")
branch_name = "main"
model = test_client.branch.get(project_id, branch_name, commits_limit=1)
model_id: str = model.id
root_obj_id = operations.send(
test_object, [ServerTransport(project_id, test_client)]
)
version_id = test_client.commit.create(project_id, root_obj_id)
automation_name = crypto_random_string(10)
automation_id = crypto_random_string(10)
automation_revision_id = crypto_random_string(10)
register_new_automation(
test_client,
project_id,
model_id,
automation_id,
automation_name,
automation_revision_id,
)
automation_run_id = crypto_random_string(10)
function_id = crypto_random_string(10)
function_name = f"automate test {crypto_random_string(3)}"
return AutomationRunData(
project_id=project_id,
model_id=model_id,
branch_name=branch_name,
version_id=version_id,
speckle_server_url=speckle_server_url,
automation_id=automation_id,
automation_revision_id=automation_revision_id,
automation_run_id=automation_run_id,
function_id=function_id,
function_name=function_name,
function_logo=None,
)
@pytest.fixture
def automation_context(
automation_run_data: AutomationRunData, speckle_token: str
) -> AutomationContext:
"""Set up the run context."""
return AutomationContext.initialize(automation_run_data, speckle_token)
def get_automation_status(
project_id: str,
model_id: str,
speckle_client: SpeckleClient,
):
query = gql(
"""
query AutomationRuns(
$projectId: String!
$modelId: String!
)
{
project(id: $projectId) {
model(id: $modelId) {
automationStatus {
id
status
statusMessage
automationRuns {
id
automationId
versionId
createdAt
updatedAt
status
functionRuns {
id
functionId
elapsed
status
contextView
statusMessage
results
resultVersions {
id
}
}
}
}
}
}
}
"""
)
params = {
"projectId": project_id,
"modelId": model_id,
}
response = speckle_client.httpclient.execute(query, params)
return response["project"]["model"]["automationStatus"]
class FunctionInputs(AutomateBase):
forbidden_speckle_type: str
def automate_function(
automation_context: AutomationContext,
function_inputs: FunctionInputs,
) -> None:
"""Hey, trying the automate sdk experience here."""
version_root_object = automation_context.receive_version()
count = 0
if version_root_object.speckle_type == function_inputs.forbidden_speckle_type:
if not version_root_object.id:
raise ValueError("Cannot operate on objects without their id's.")
automation_context.attach_error_to_objects(
"Forbidden speckle_type",
version_root_object.id,
"This project should not contain the type: "
f"{function_inputs.forbidden_speckle_type}",
)
count += 1
if count > 0:
automation_context.mark_run_failed(
"Automation failed: "
f"Found {count} object that have a forbidden speckle type: "
f"{function_inputs.forbidden_speckle_type}"
)
else:
automation_context.mark_run_success("No forbidden types found.")
def test_function_run(automation_context: AutomationContext) -> None:
"""Run an integration test for the automate function."""
automation_context = run_function(
automation_context,
automate_function,
FunctionInputs(forbidden_speckle_type="Base"),
)
assert automation_context.run_status == AutomationStatus.FAILED
status = get_automation_status(
automation_context.automation_run_data.project_id,
automation_context.automation_run_data.model_id,
automation_context.speckle_client,
)
assert status["status"] == automation_context.run_status
status_message = status["automationRuns"][0]["functionRuns"][0]["statusMessage"]
assert status_message == automation_context.status_message
@pytest.fixture
def test_file_path():
path = Path(f"./{crypto_random_string(10)}").resolve()
yield path
os.remove(path)
def test_file_uploads(
automation_run_data: AutomationRunData, speckle_token: str, test_file_path: Path
):
"""Test file store capabilities of the automate sdk."""
automation_context = AutomationContext.initialize(
automation_run_data, speckle_token
)
test_file_path.write_text("foobar")
automation_context.store_file_result(test_file_path)
assert len(automation_context._automation_result.blobs) == 1
def test_create_version_in_project_raises_error_for_same_model(
automation_context: AutomationContext,
) -> None:
with pytest.raises(ValueError):
automation_context.create_new_version_in_project(
Base(), automation_context.automation_run_data.branch_name
)
def test_create_version_in_project(
automation_context: AutomationContext,
) -> None:
root_object = Base()
root_object.foo = "bar"
model_id, version_id = automation_context.create_new_version_in_project(
root_object, "foobar"
)
assert model_id is not None
assert version_id is not None
def test_set_context_view(automation_context: AutomationContext) -> None:
automation_context.set_context_view()
assert automation_context.context_view is not None
assert automation_context.context_view.endswith(
f"models/{automation_context.automation_run_data.model_id}@{automation_context.automation_run_data.version_id}"
)
automation_context.report_run_status()
automation_context._automation_result.result_view = None
dummy_context = "foo@bar"
automation_context.set_context_view([dummy_context])
assert automation_context.context_view is not None
assert automation_context.context_view.endswith(
f"models/{automation_context.automation_run_data.model_id}@{automation_context.automation_run_data.version_id},{dummy_context}"
)
automation_context.report_run_status()
automation_context._automation_result.result_view = None
dummy_context = "foo@baz"
automation_context.set_context_view(
[dummy_context], include_source_model_version=False
)
assert automation_context.context_view is not None
assert automation_context.context_view.endswith(f"models/{dummy_context}")
automation_context.report_run_status()

Просмотреть файл

@ -18,6 +18,7 @@ class TestServer:
server = client.server.get()
assert isinstance(server, ServerInfo)
assert isinstance(server.frontend2, bool)
def test_server_version(self, client: SpeckleClient):
version = client.server.version()

Просмотреть файл

@ -8,11 +8,7 @@ from specklepy.api.models import (
Stream,
User,
)
from specklepy.logging.exceptions import (
GraphQLException,
SpeckleException,
UnsupportedException,
)
from specklepy.logging.exceptions import GraphQLException, SpeckleException
@pytest.mark.run(order=3)

Просмотреть файл

@ -2,11 +2,13 @@ import json
import tempfile
from pathlib import Path
from typing import Iterable
from urllib.parse import unquote
import pytest
from specklepy.api.wrapper import StreamWrapper
from specklepy.core.helpers import speckle_path_provider
from specklepy.logging.exceptions import SpeckleException
@pytest.fixture(scope="module", autouse=True)
@ -29,6 +31,22 @@ def user_path() -> Iterable[Path]:
speckle_path_provider.override_application_data_path(None)
def test_parse_empty():
try:
StreamWrapper("https://testing.speckle.dev/streams")
assert False
except SpeckleException:
assert True
def test_parse_empty_fe2():
try:
StreamWrapper("https://latest.speckle.systems/projects")
assert False
except SpeckleException:
assert True
def test_parse_stream():
wrap = StreamWrapper("https://testing.speckle.dev/streams/a75ab4f10f")
assert wrap.type == "stream"
@ -126,3 +144,72 @@ def test_wrapper_url_match(user_path) -> None:
account = wrap.get_account()
assert account.userInfo.email is None
def test_parse_project():
wrap = StreamWrapper("https://latest.speckle.systems/projects/843d07eb10")
assert wrap.type == "stream"
def test_parse_model():
wrap = StreamWrapper(
"https://latest.speckle.systems/projects/843d07eb10/models/d9eb4918c8"
)
assert wrap.branch_name == "building wrapper"
assert wrap.type == "branch"
def test_parse_federated_model():
try:
StreamWrapper("https://latest.speckle.systems/projects/843d07eb10/models/$main")
assert False
except SpeckleException:
assert True
def test_parse_multi_model():
try:
StreamWrapper(
"https://latest.speckle.systems/projects/2099ac4b5f/models/1870f279e3,a9cfdddc79"
)
assert False
except SpeckleException:
assert True
def test_parse_object_fe2():
wrap = StreamWrapper(
"https://latest.speckle.systems/projects/24c3741255/models/b48d1b10f5a732f4ca4144286391282c"
)
assert wrap.type == "object"
def test_parse_version():
wrap = StreamWrapper(
"https://latest.speckle.systems/projects/843d07eb10/models/4e7345c838@c42d5cbac1"
)
wrap_quoted = StreamWrapper(
"https://latest.speckle.systems/projects/843d07eb10/models/4e7345c838%40c42d5cbac1"
)
assert wrap.type == "commit"
assert wrap_quoted.type == "commit"
def test_to_string():
urls = [
"https://testing.speckle.dev/streams/a75ab4f10f",
"https://testing.speckle.dev/streams/4c3ce1459c/branches/%F0%9F%8D%95%E2%AC%85%F0%9F%8C%9F%20you%20wat%3F",
"https://testing.speckle.dev/streams/0c6ad366c4/globals",
"https://testing.speckle.dev/streams/0c6ad366c4/globals/abd3787893",
"https://testing.speckle.dev/streams/4c3ce1459c/commits/8b9b831792",
"https://testing.speckle.dev/streams/a75ab4f10f/objects/5530363e6d51c904903dafc3ea1d2ec6",
"https://latest.speckle.systems/projects/843d07eb10",
"https://latest.speckle.systems/projects/843d07eb10/models/4e7345c838",
"https://latest.speckle.systems/projects/843d07eb10/models/4e7345c838@c42d5cbac1",
"https://latest.speckle.systems/projects/843d07eb10/models/4e7345c838%40c42d5cbac1",
"https://latest.speckle.systems/projects/24c3741255/models/b48d1b10f5a732f4ca4144286391282c",
]
for url in urls:
wrap = StreamWrapper(url)
assert unquote(wrap.to_string()) == unquote(url)

Просмотреть файл

@ -107,7 +107,12 @@ fake_bases = [FakeBase("foo"), FakeBase("bar")]
fake_bases,
),
(List["int"], [2, 3, 4], True, [2, 3, 4]),
(Union[float, Dict[str, float]], {"foo": 1, "bar": 2}, True, {"foo": 1.0, "bar": 2.0}),
(
Union[float, Dict[str, float]],
{"foo": 1, "bar": 2},
True,
{"foo": 1.0, "bar": 2.0},
),
(Union[float, Dict[str, float]], {"foo": "bar"}, False, {"foo": "bar"}),
],
)

Просмотреть файл

@ -1,5 +1,3 @@
import pytest
from specklepy.objects.units import Units, get_scale_factor
@ -8,7 +6,7 @@ from specklepy.objects.units import Units, get_scale_factor
@pytest.mark.parametrize(
"fromUnits, toUnits, inValue, expectedOutValue",
[
#To self
# To self
(Units.km, Units.km, 1.5, 1.5),
(Units.km, Units.km, 0, 0),
(Units.m, Units.m, 1.5, 1.5),
@ -23,24 +21,20 @@ from specklepy.objects.units import Units, get_scale_factor
(Units.yards, Units.yards, 0, 0),
(Units.feet, Units.feet, 1.5, 1.5),
(Units.feet, Units.feet, 0, 0),
#To Meters
# To Meters
(Units.km, Units.m, 987654.321, 987654321),
(Units.m, Units.m, 987654.321, 987654.321),
(Units.mm, Units.m, 98765432.1, 98765.4321),
(Units.cm, Units.m, 9876543.21, 98765.4321),
#To negative meters
# To negative meters
(Units.km, Units.m, -987654.321, -987654321),
(Units.m, Units.m,- 987654.321, -987654.321),
(Units.m, Units.m, -987654.321, -987654.321),
(Units.mm, Units.m, -98765432.1, -98765.4321),
(Units.cm, Units.m, -9876543.21, -98765.4321),
(Units.m, Units.km, 987654.321, 987.654321),
(Units.m, Units.cm, 987654.321, 98765432.1),
(Units.m, Units.mm, 987654.321, 987654321),
#Imperial
# Imperial
(Units.miles, Units.m, 123.45, 198673.517),
(Units.miles, Units.inches, 123.45, 7821792),
(Units.yards, Units.m, 123.45, 112.88268),
@ -50,7 +44,9 @@ from specklepy.objects.units import Units, get_scale_factor
(Units.inches, Units.m, 123.45, 3.13563),
],
)
def test_get_scale_factor_between_units(fromUnits: Units, toUnits: Units, inValue: float, expectedOutValue: float):
def test_get_scale_factor_between_units(
fromUnits: Units, toUnits: Units, inValue: float, expectedOutValue: float
):
Tolerance = 1e-10
actual = inValue * get_scale_factor(fromUnits, toUnits)
assert(actual - expectedOutValue < Tolerance)
assert actual - expectedOutValue < Tolerance
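
Worth noting, though unchanged by this commit: the tolerance check above is one-sided, so a result far below the expected value would still pass. A symmetric comparison would use an absolute difference or pytest.approx, for example:

import pytest

actual = 123.45 * 1609.344  # miles to metres (1609.344 m per mile)
assert actual == pytest.approx(198673.517, abs=1e-3)
# or, without pytest:
assert abs(actual - 198673.517) < 1e-3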

Просмотреть файл

@ -3,9 +3,9 @@ Provides uniform and consistent path helpers for `specklepy`
"""
import os
import sys
from importlib import import_module, invalidate_caches
from pathlib import Path
from typing import Optional
from importlib import import_module, invalidate_caches
_user_data_env_var = "SPECKLE_USERDATA_PATH"
@ -55,9 +55,7 @@ def user_application_data_path() -> Path:
if sys.platform.startswith("win"):
app_data_path = os.getenv("APPDATA")
if not app_data_path:
raise Exception(
"Cannot get appdata path from environment."
)
raise Exception("Cannot get appdata path from environment.")
return Path(app_data_path)
else:
# try getting the standard XDG_DATA_HOME value
@ -68,9 +66,7 @@ def user_application_data_path() -> Path:
else:
return _ensure_folder_exists(Path.home(), ".config")
except Exception as ex:
raise Exception(
"Failed to initialize user application data path.", ex
)
raise Exception("Failed to initialize user application data path.", ex)
def user_speckle_folder_path() -> Path:
@ -90,19 +86,16 @@ def user_speckle_connector_installation_path(host_application: str) -> Path:
)
print("Starting module dependency installation")
print(sys.executable)
PYTHON_PATH = sys.executable
def connector_installation_path(host_application: str) -> Path:
connector_installation_path = user_speckle_connector_installation_path(host_application)
connector_installation_path = user_speckle_connector_installation_path(
host_application
)
connector_installation_path.mkdir(exist_ok=True, parents=True)
# set user modules path at beginning of paths for earlier hit
@ -113,7 +106,6 @@ def connector_installation_path(host_application: str) -> Path:
return connector_installation_path
def is_pip_available() -> bool:
try:
import_module("pip") # noqa F401
@ -132,7 +124,9 @@ def ensure_pip() -> None:
if completed_process.returncode == 0:
print("Successfully installed pip")
else:
raise Exception(f"Failed to install pip, got {completed_process.returncode} return code")
raise Exception(
f"Failed to install pip, got {completed_process.returncode} return code"
)
def get_requirements_path() -> Path:
@ -195,6 +189,7 @@ def _import_dependencies() -> None:
# print(req)
# import_module("specklepy")
def ensure_dependencies(host_application: str) -> None:
try:
install_dependencies(host_application)
@ -202,6 +197,6 @@ def ensure_dependencies(host_application: str) -> None:
_import_dependencies()
print("Successfully found dependencies")
except ImportError:
raise Exception(f"Cannot automatically ensure Speckle dependencies. Please try restarting the host application {host_application}!")
raise Exception(
f"Cannot automatically ensure Speckle dependencies. Please try restarting the host application {host_application}!"
)
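
To close out the installer hunks, a hedged, self-contained sketch of the startup pattern connector_installation_path implements; nothing is imported from specklepy here because this file's module path is not shown in the diff, and the folder location used is illustrative only:

import sys
from pathlib import Path


def connector_installation_path(host_application: str) -> Path:
    # Illustrative location; the real helper derives this from the user
    # application data path handled earlier in this file.
    path = Path.home() / "Speckle" / "connector_installations" / host_application
    path.mkdir(exist_ok=True, parents=True)
    # Prepend so freshly installed modules are found before any older copies.
    if str(path) not in sys.path:
        sys.path.insert(0, str(path))
    return path


# A host connector would call this once at startup, then install and import
# its pinned requirements into that folder.
print(connector_installation_path("Blender"))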