This repository was archived by the owner on Dec 31, 2023. It is now read-only.
Merged
Changes from all commits · 17 commits
c1ea051
automl beta [(#1575)](https://github.com/GoogleCloudPlatform/python-d…
sirtorry Jul 20, 2018
81c7fc3
remove translate prediction fallback [(#1598)](https://github.com/Goo…
sirtorry Jul 24, 2018
069f434
skip automl model create/delete test [(#1608)](https://github.com/Goo…
sirtorry Aug 3, 2018
5e2dd8e
Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPla…
dpebot Aug 28, 2018
d91c4f1
Update AutoML region tags to use standard product prefixes [(#1669)](…
alixhami Aug 29, 2018
0650bcc
Fix AutoML region tag typos [(#1687)](https://github.com/GoogleCloudP…
alixhami Sep 6, 2018
027a481
Fixed name of model [(#1779)](https://github.com/GoogleCloudPlatform/…
engelke Oct 19, 2018
8323272
Auto-update dependencies. [(#1980)](https://github.com/GoogleCloudPla…
dpebot Feb 6, 2019
cea0af7
Updated beta version of automl [(#2124)](https://github.com/GoogleClo…
engelke Apr 26, 2019
22adca8
Translate: migrate published v3 translate batch samples [(#2914)](ht…
munkhuushmgl Mar 2, 2020
92028e3
chore(deps): update dependency google-cloud-storage to v1.28.1 [(#378…
renovate-bot May 19, 2020
975e4f0
Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. [(#4022)](https://g…
kurtisvg Jun 9, 2020
5082dee
fix(translate): fix a broken test [(#4360)](https://github.com/Google…
Jul 23, 2020
5c10928
chore(deps): update dependency google-cloud-translate to v2.0.2 [(#44…
renovate-bot Aug 4, 2020
0089f38
Update dependency google-cloud-storage to v1.30.0
renovate-bot Jul 29, 2020
d5a944b
docs: add cancel operation sample
busunkim96 Aug 18, 2020
b7b2677
chore: lint
busunkim96 Aug 18, 2020
62 changes: 62 additions & 0 deletions samples/beta/cancel_operation.py
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# [START automl_cancel_operation]

from google.cloud import automl_v1beta1


def sample_cancel_operation(project, operation_id):
    """Cancel a long-running operation.

    Args:
        project: Required. Your Google Cloud Project ID.
        operation_id: Required. The ID of the operation to cancel.
    """

    # TODO(developer): Uncomment and set the following variables
    # project = '[Google Cloud Project ID]'
    # operation_id = '[Operation ID]'

    client = automl_v1beta1.AutoMlClient()

    # The long-running operations client is exposed on the generated transport.
    operations_client = client.transport._operations_client

    name = "projects/{}/locations/us-central1/operations/{}".format(
        project, operation_id
    )

operations_client.cancel_operation(name)

print(u"Cancelled operation: {}".format(name))


# [END automl_cancel_operation]


def main():
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--project", type=str, default="[Google Cloud Project ID]")
parser.add_argument("--operation_id", type=str, default="[Operation ID]")
args = parser.parse_args()

sample_cancel_operation(args.project, args.operation_id)


if __name__ == "__main__":
main()
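
The sample above assumes you already know the operation ID. For reference, here is a minimal, hypothetical companion sketch that lists operations so their IDs can be found, reusing the same private `client.transport._operations_client` handle the sample relies on; the helper name and the empty filter string are illustrative assumptions, not part of this PR.

```python
# Hypothetical companion to sample_cancel_operation: list operations under
# the project's us-central1 location so their IDs can be cancelled.
from google.cloud import automl_v1beta1


def list_operations(project):
    client = automl_v1beta1.AutoMlClient()
    # Same private handle the cancel sample uses to reach the
    # google.api_core operations client.
    operations_client = client.transport._operations_client

    parent = "projects/{}/locations/us-central1".format(project)
    # An empty filter returns all operations; the last path segment of
    # each operation name is the ID sample_cancel_operation expects.
    for operation in operations_client.list_operations(parent, filter_=""):
        print(operation.name)
```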
278 changes: 278 additions & 0 deletions samples/snippets/automl_translation_dataset.py
@@ -0,0 +1,278 @@
#!/usr/bin/env python

# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This application demonstrates how to perform basic operations on dataset
with the Google AutoML Translation API.

For more information, see the documentation at
https://cloud.google.com/translate/automl/docs
"""

import argparse
import os


def create_dataset(project_id, compute_region, dataset_name, source, target):
"""Create a dataset."""
# [START automl_translate_create_dataset]
# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# dataset_name = 'DATASET_NAME_HERE'
# source = 'LANGUAGE_CODE_OF_SOURCE_LANGUAGE'
# target = 'LANGUAGE_CODE_OF_TARGET_LANGUAGE'

from google.cloud import automl_v1beta1 as automl

client = automl.AutoMlClient()

# A resource that represents Google Cloud Platform location.
project_location = client.location_path(project_id, compute_region)

# Specify the source and target language.
dataset_metadata = {
"source_language_code": source,
"target_language_code": target,
}
# Set dataset name and dataset metadata
my_dataset = {
"display_name": dataset_name,
"translation_dataset_metadata": dataset_metadata,
}

# Create a dataset with the dataset metadata in the region.
dataset = client.create_dataset(project_location, my_dataset)

# Display the dataset information
print("Dataset name: {}".format(dataset.name))
print("Dataset id: {}".format(dataset.name.split("/")[-1]))
print("Dataset display name: {}".format(dataset.display_name))
print("Translation dataset Metadata:")
print(
"\tsource_language_code: {}".format(
dataset.translation_dataset_metadata.source_language_code
)
)
print(
"\ttarget_language_code: {}".format(
dataset.translation_dataset_metadata.target_language_code
)
)
print("Dataset create time:")
print("\tseconds: {}".format(dataset.create_time.seconds))
print("\tnanos: {}".format(dataset.create_time.nanos))

# [END automl_translate_create_dataset]


def list_datasets(project_id, compute_region, filter_):
"""List Datasets."""
# [START automl_translate_list_datasets]
# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# filter_ = 'filter expression here'

from google.cloud import automl_v1beta1 as automl

client = automl.AutoMlClient()

# A resource that represents Google Cloud Platform location.
project_location = client.location_path(project_id, compute_region)

# List all the datasets available in the region by applying filter.
response = client.list_datasets(project_location, filter_)

print("List of datasets:")
for dataset in response:
# Display the dataset information
print("Dataset name: {}".format(dataset.name))
print("Dataset id: {}".format(dataset.name.split("/")[-1]))
print("Dataset display name: {}".format(dataset.display_name))
print("Translation dataset metadata:")
print(
"\tsource_language_code: {}".format(
dataset.translation_dataset_metadata.source_language_code
)
)
print(
"\ttarget_language_code: {}".format(
dataset.translation_dataset_metadata.target_language_code
)
)
print("Dataset create time:")
print("\tseconds: {}".format(dataset.create_time.seconds))
print("\tnanos: {}".format(dataset.create_time.nanos))

# [END automl_translate_list_datasets]


def get_dataset(project_id, compute_region, dataset_id):
"""Get the dataset."""
# [START automl_translate_get_dataset]
# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# dataset_id = 'DATASET_ID_HERE'

from google.cloud import automl_v1beta1 as automl

client = automl.AutoMlClient()

# Get the full path of the dataset
dataset_full_id = client.dataset_path(
project_id, compute_region, dataset_id
)

# Get complete detail of the dataset.
dataset = client.get_dataset(dataset_full_id)

# Display the dataset information
print("Dataset name: {}".format(dataset.name))
print("Dataset id: {}".format(dataset.name.split("/")[-1]))
print("Dataset display name: {}".format(dataset.display_name))
print("Translation dataset metadata:")
print(
"\tsource_language_code: {}".format(
dataset.translation_dataset_metadata.source_language_code
)
)
print(
"\ttarget_language_code: {}".format(
dataset.translation_dataset_metadata.target_language_code
)
)
print("Dataset create time:")
print("\tseconds: {}".format(dataset.create_time.seconds))
print("\tnanos: {}".format(dataset.create_time.nanos))

# [END automl_translate_get_dataset]


def import_data(project_id, compute_region, dataset_id, path):
"""Import sentence pairs to the dataset."""
# [START automl_translate_import_data]
# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# dataset_id = 'DATASET_ID_HERE'
# path = 'gs://path/to/file.csv'

from google.cloud import automl_v1beta1 as automl

client = automl.AutoMlClient()

# Get the full path of the dataset.
dataset_full_id = client.dataset_path(
project_id, compute_region, dataset_id
)

    # The path may be a comma-separated list of Google Cloud Storage URIs.
input_uris = path.split(",")
input_config = {"gcs_source": {"input_uris": input_uris}}

# Import data from the input URI
response = client.import_data(dataset_full_id, input_config)

print("Processing import...")
# synchronous check of operation status
print("Data imported. {}".format(response.result()))

# [END automl_translate_import_data]


def delete_dataset(project_id, compute_region, dataset_id):
"""Delete a dataset."""
# [START automl_translate_delete_dataset]
# TODO(developer): Uncomment and set the following variables
# project_id = 'PROJECT_ID_HERE'
# compute_region = 'COMPUTE_REGION_HERE'
# dataset_id = 'DATASET_ID_HERE'

from google.cloud import automl_v1beta1 as automl

client = automl.AutoMlClient()

# Get the full path of the dataset.
dataset_full_id = client.dataset_path(
project_id, compute_region, dataset_id
)

# Delete a dataset.
response = client.delete_dataset(dataset_full_id)

    # Wait for the delete to complete (synchronous check of the operation).
print("Dataset deleted. {}".format(response.result()))

# [END automl_translate_delete_dataset]


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
subparsers = parser.add_subparsers(dest="command")

create_dataset_parser = subparsers.add_parser(
"create_dataset", help=create_dataset.__doc__
)
create_dataset_parser.add_argument("dataset_name")
create_dataset_parser.add_argument("source")
create_dataset_parser.add_argument("target")

list_datasets_parser = subparsers.add_parser(
"list_datasets", help=list_datasets.__doc__
)
list_datasets_parser.add_argument("filter", nargs="?", default="")

import_data_parser = subparsers.add_parser(
"import_data", help=import_data.__doc__
)
import_data_parser.add_argument("dataset_id")
import_data_parser.add_argument("path")

delete_dataset_parser = subparsers.add_parser(
"delete_dataset", help=delete_dataset.__doc__
)
delete_dataset_parser.add_argument("dataset_id")

get_dataset_parser = subparsers.add_parser(
"get_dataset", help=get_dataset.__doc__
)
get_dataset_parser.add_argument("dataset_id")

project_id = os.environ["PROJECT_ID"]
compute_region = os.environ["REGION_NAME"]

args = parser.parse_args()

if args.command == "create_dataset":
create_dataset(
project_id,
compute_region,
args.dataset_name,
args.source,
args.target,
)
if args.command == "list_datasets":
list_datasets(project_id, compute_region, args.filter)
if args.command == "get_dataset":
get_dataset(project_id, compute_region, args.dataset_id)
if args.command == "import_data":
import_data(project_id, compute_region, args.dataset_id, args.path)
if args.command == "delete_dataset":
delete_dataset(project_id, compute_region, args.dataset_id)
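
Taken together, the new module is a small subcommand CLI that reads `PROJECT_ID` and `REGION_NAME` from the environment. Below is a hedged end-to-end sketch of the intended dataset lifecycle, calling the functions above directly; the module import path, dataset ID, and GCS path are illustrative placeholders, not values from this PR.

```python
import os

import automl_translation_dataset as datasets  # assumed import path for the new module

project_id = os.environ["PROJECT_ID"]
compute_region = os.environ["REGION_NAME"]  # AutoML Translation runs in us-central1

# Create an English -> Spanish dataset; the "Dataset id" line it prints
# is the ID used by the later calls.
datasets.create_dataset(project_id, compute_region, "en_es_dataset", "en", "es")

# Import sentence pairs; the path may be a comma-separated list of GCS CSVs
# in the import format described in the AutoML Translation docs linked above.
datasets.import_data(
    project_id, compute_region, "TRL0000000000000000000", "gs://my-bucket/train.csv"
)

# Clean up once finished.
datasets.delete_dataset(project_id, compute_region, "TRL0000000000000000000")
```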