column            type           length (min .. max)
----------------  -------------  -------------------
repo_name         stringlengths  7 .. 94
repo_path         stringlengths  4 .. 237
repo_head_hexsha  stringlengths  40 .. 40
content           stringlengths  10 .. 680k
apis              stringlengths  2 .. 840k
MTES-MCT/sparte
public_data/serializers.py
3b8ae6d21da81ca761d64ae9dfe2c8f54487211c
from rest_framework_gis import serializers
from rest_framework import serializers as s

from .models import (
    Artificialisee2015to2018,
    Artificielle2018,
    CommunesSybarval,
    CouvertureSol,
    EnveloppeUrbaine2018,
    Ocsge,
    Renaturee2018to2015,
    Sybarval,
    Voirie2018,
    ZonesBaties2018,
    UsageSol,
)


def get_label(code="", label=""):
    if code is None:
        code = "-"
    if label is None:
        label = "inconnu"
    return f"{code} {label[:30]}"


class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer):
    usage_2015 = s.SerializerMethodField()
    usage_2018 = s.SerializerMethodField()
    couverture_2015 = s.SerializerMethodField()
    couverture_2018 = s.SerializerMethodField()

    def get_usage_2015(self, obj):
        return get_label(code=obj.us_2015, label=obj.us_2015_label)

    def get_usage_2018(self, obj):
        return get_label(code=obj.us_2018, label=obj.us_2018_label)

    def get_couverture_2015(self, obj):
        return get_label(code=obj.cs_2015, label=obj.cs_2015_label)

    def get_couverture_2018(self, obj):
        return get_label(code=obj.cs_2018, label=obj.cs_2018_label)

    class Meta:
        fields = (
            "id",
            "surface",
            "usage_2015",
            "usage_2018",
            "couverture_2015",
            "couverture_2018",
        )
        geo_field = "mpoly"
        model = Artificialisee2015to2018


class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    class Meta:
        fields = (
            "id",
            "surface",
            "couverture",
        )
        geo_field = "mpoly"
        model = Artificielle2018


class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer):
    """Marker GeoJSON serializer."""

    class Meta:
        """Marker serializer meta class."""

        fields = (
            "nom",
            "code_insee",
            "surface",
        )
        geo_field = "mpoly"
        model = CommunesSybarval


class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    class Meta:
        fields = (
            "id",
            "couverture",
            "surface",
        )
        geo_field = "mpoly"
        model = EnveloppeUrbaine2018


class OcsgeSerializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = (
            "id",
            "couverture",
            "usage",
            "millesime",
            "map_color",
            "year",
        )
        geo_field = "mpoly"
        model = Ocsge


class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer):
    usage_2015 = s.SerializerMethodField()
    usage_2018 = s.SerializerMethodField()
    couverture_2015 = s.SerializerMethodField()
    couverture_2018 = s.SerializerMethodField()

    def get_usage_2015(self, obj):
        return get_label(code=obj.us_2015, label=obj.us_2015_label)

    def get_usage_2018(self, obj):
        return get_label(code=obj.us_2018, label=obj.us_2018_label)

    def get_couverture_2015(self, obj):
        return get_label(code=obj.cs_2015, label=obj.cs_2015_label)

    def get_couverture_2018(self, obj):
        return get_label(code=obj.cs_2018, label=obj.cs_2018_label)

    class Meta:
        fields = (
            "id",
            "surface",
            "usage_2015",
            "usage_2018",
            "couverture_2015",
            "couverture_2018",
        )
        geo_field = "mpoly"
        model = Renaturee2018to2015


class SybarvalSerializer(serializers.GeoFeatureModelSerializer):
    class Meta:
        fields = (
            "id",
            "surface",
        )
        geo_field = "mpoly"
        model = Sybarval


class Voirie2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = (
            "id",
            "surface",
            "couverture",
            "usage",
        )
        geo_field = "mpoly"
        model = Voirie2018


class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = (
            "id",
            "couverture",
            "usage",
            "surface",
        )
        geo_field = "mpoly"
        model = ZonesBaties2018


class CouvertureSolSerializer(serializers.ModelSerializer):
    class Meta:
        fields = (
            "id",
            "parent",
            "code",
            "label",
            "is_artificial",
        )
        model = CouvertureSol


class UsageSolSerializer(serializers.ModelSerializer):
    class Meta:
        fields = (
            "id",
            "parent",
            "code",
            "label",
        )
        model = UsageSol
[((28, 17, 28, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((29, 17, 29, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((30, 22, 30, 47), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((31, 22, 31, 47), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((59, 17, 59, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((90, 17, 90, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((106, 17, 106, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((107, 12, 107, 37), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((129, 17, 129, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((130, 17, 130, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((131, 22, 131, 47), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((132, 22, 132, 47), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((170, 17, 170, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((171, 12, 171, 37), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((191, 17, 191, 42), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n'), ((192, 12, 192, 37), 'rest_framework.serializers.SerializerMethodField', 's.SerializerMethodField', ({}, {}), '()', True, 'from rest_framework import serializers as s\n')]
naman1901/django-quick-search
quick_search/admin.py
7b93554ed9fa4721e52372f9fd1a395d94cc04a7
from django.contrib import admin

from .models import SearchResult

# Register your models here.
class SearchResultAdmin(admin.ModelAdmin):
    fields = ["query", "heading", "url", "text"]


admin.site.register(SearchResult, SearchResultAdmin)
[((8, 0, 8, 52), 'django.contrib.admin.site.register', 'admin.site.register', ({(8, 20, 8, 32): 'SearchResult', (8, 34, 8, 51): 'SearchResultAdmin'}, {}), '(SearchResult, SearchResultAdmin)', False, 'from django.contrib import admin\n')]
Amirali-Shirkh/rasa-for-botfront
rasa/train.py
36aa24ad31241c5d1a180bbe34e1c8c50da40ff7
import asyncio import os import tempfile from contextlib import ExitStack from typing import Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: """Trains a Rasa model (Core and NLU). Args: domain: Path to the domain file. config: Dict of paths to the config for Core and NLU. Keys are language codes training_files: Paths to the training data for Core and NLU. output_path: Output path. force_training: If `True` retrain model even if data has not changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data: `True` if the NLU training data should be persisted with the model. additional_arguments: Additional training parameters. Returns: Path of the trained model archive. """ # file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( "Core training was skipped because no valid domain file was found. Only an nlu-model was created." "Please specify a valid domain using '--domain' argument or check if the provided domain file exists." 
) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: """Trains a Rasa model (Core and NLU). Use only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in which to train the model. output_path: Output path. force_training: If `True` retrain model even if data has not changed. persist_nlu_training_data: `True` if the NLU training data should be persisted with the model. fixed_model_name: Name of model to be stored. additional_arguments: Additional training parameters. Returns: Path of the trained model archive. """ stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( # "No training data given. Please provide stories and NLU data in " # "order to train a Rasa model using the '--data' argument." # ) # return # if nlu_data.is_empty(): # print_warning("No NLU data present. Just a Rasa Core model will be trained.") # return await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod > if fingerprint_comparison.nlu == True: # replace True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get("nlu-config", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable: print_color("Skipping Core training since domain or stories are empty.", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color("No NLU data found for language <{}>, skipping training...".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( "Nothing changed. You can use the old model stored at '{}'." 
"".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( "Core stories/configuration did not change. " "Only the templates section has been changed. A new model with " "the updated templates will be created.", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( "Core stories/configuration did not change. No need to retrain Core model.", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( "NLU data/configuration did not change. No need to retrain NLU model.", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: """Trains a Core model. Args: domain: Path to the domain file. config: Path to the config file for Core. stories: Path to the Core training data. output: Output path. train_path: If `None` the model will be trained in a temporary directory, otherwise in the provided directory. fixed_model_name: Name of model to be stored. uncompress: If `True` the model will not be compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given it returns the path to the model archive, otherwise the path to the directory with the trained model files. """ file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( "Core training was skipped because no valid domain file was found. " "Please specify a valid domain using '--domain' argument or check if the provided domain file exists." ) return None if not await file_importer.get_stories(): print_error( "No stories given. Please provide stories in order to " "train a Rasa Core model using the '--stories' argument." 
) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: """Train Core with validated training and config data.""" import rasa.core.train with ExitStack() as stack: if train_path: # If the train path was provided, do nothing on exit. _train_path = train_path else: # Otherwise, create a temp train path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color("Training Core model...", color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color("Core model training completed.", color=bcolors.OKBLUE) if train_path is None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix="core-", ) return _train_path def train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: """Trains an NLU model. Args: config: Path to the config file for NLU. nlu_data: Path to the NLU training data. output: Output path. train_path: If `None` the model will be trained in a temporary directory, otherwise in the provided directory. fixed_model_name: Name of the model to be stored. persist_nlu_training_data: `True` if the NLU training data should be persisted with the model. Returns: If `train_path` is given it returns the path to the model archive, otherwise the path to the directory with the trained model files. """ loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ): if not nlu_data: print_error( "No NLU data given. Please provide NLU data in order to train " "a Rasa NLU model using the '--nlu' argument." ) return # training NLU only hence the training files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f"Path '{nlu_data}' doesn't contain valid NLU data in it. " "Please verify the data format. " "The NLU model training will be skipped now." 
) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: """Train NLU with validated training and config data.""" import rasa.nlu.train with ExitStack() as stack: models = {} from rasa.nlu import config as cfg_loader if train_path: # If the train path was provided, do nothing on exit. _train_path = train_path else: # Otherwise, create a temp train path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color("Start training {} NLU model ...".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name="nlu-{}".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color("NLU data for language <{}> didn't change, skipping training...".format(lang), color=bcolors.OKBLUE) # /bf mod print_color("NLU model training completed.", color=bcolors.OKBLUE) if train_path is None: # Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix="nlu-", ) return _train_path
[((116, 4, 119, 5), 'rasa.cli.utils.print_warning', 'print_warning', ({(117, 8, 118, 110): '"""Core training was skipped because no valid domain file was found. Only an nlu-model was created.Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."""'}, {}), '(\n "Core training was skipped because no valid domain file was found. Only an nlu-model was created.Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."\n )', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((169, 16, 169, 51), 'rasa.model.get_latest_model', 'model.get_latest_model', ({(169, 39, 169, 50): 'output_path'}, {}), '(output_path)', False, 'from rasa import model\n'), ((170, 29, 170, 87), 'rasa.model.FingerprintComparisonResult', 'FingerprintComparisonResult', (), '', False, 'from rasa.model import FingerprintComparisonResult\n'), ((275, 11, 275, 35), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((317, 20, 319, 5), 'rasa.importers.importer.TrainingDataImporter.load_core_importer_from_config', 'TrainingDataImporter.load_core_importer_from_config', ({(318, 8, 318, 14): 'config', (318, 16, 318, 22): 'domain', (318, 24, 318, 33): '[stories]'}, {}), '(config, domain, [stories])', False, 'from rasa.importers.importer import TrainingDataImporter\n'), ((418, 11, 418, 35), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((447, 20, 449, 5), 'rasa.importers.importer.TrainingDataImporter.load_nlu_importer_from_config', 'TrainingDataImporter.load_nlu_importer_from_config', (), '', False, 'from rasa.importers.importer import TrainingDataImporter\n'), ((86, 9, 86, 20), 'contextlib.ExitStack', 'ExitStack', ({}, {}), '()', False, 'from contextlib import ExitStack\n'), ((91, 24, 91, 76), 'rasa_addons.importers.BotfrontFileImporter', 'BotfrontFileImporter', ({(91, 45, 91, 51): 'config', (91, 53, 91, 59): 'domain', (91, 61, 91, 75): 'training_files'}, {}), '(config, domain, training_files)', False, 'from rasa_addons.importers import BotfrontFileImporter\n'), ((168, 28, 168, 66), 'rasa.model.model_fingerprint', 'model.model_fingerprint', ({(168, 52, 168, 65): 'file_importer'}, {}), '(file_importer)', False, 'from rasa import model\n'), ((172, 33, 174, 9), 'rasa.model.should_retrain', 'model.should_retrain', ({(173, 12, 173, 27): 'new_fingerprint', (173, 29, 173, 38): 'old_model', (173, 40, 173, 50): 'train_path'}, {}), '(new_fingerprint, old_model, train_path)', False, 'from rasa import model\n'), ((186, 8, 186, 102), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((202, 15, 207, 9), 'rasa.model.package_model', 'model.package_model', (), '', False, 'from rasa import model\n'), ((226, 40, 226, 69), 'rasa.model.FingerprintComparisonResult', 'FingerprintComparisonResult', ({}, {}), '()', False, 'from rasa.model import FingerprintComparisonResult\n'), ((260, 8, 263, 9), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((322, 8, 325, 9), 'rasa.cli.utils.print_error', 'print_error', ({(323, 12, 324, 114): '"""Core training was skipped because no valid domain file was found. 
Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."""'}, {}), '(\n "Core training was skipped because no valid domain file was found. Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."\n )', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((329, 8, 332, 9), 'rasa.cli.utils.print_error', 'print_error', ({(330, 12, 331, 69): '"""No stories given. Please provide stories in order to train a Rasa Core model using the \'--stories\' argument."""'}, {}), '(\n "No stories given. Please provide stories in order to train a Rasa Core model using the \'--stories\' argument."\n )', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((355, 9, 355, 20), 'contextlib.ExitStack', 'ExitStack', ({}, {}), '()', False, 'from contextlib import ExitStack\n'), ((364, 8, 364, 67), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((375, 8, 375, 75), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((440, 8, 443, 9), 'rasa.cli.utils.print_error', 'print_error', ({(441, 12, 442, 58): '"""No NLU data given. Please provide NLU data in order to train a Rasa NLU model using the \'--nlu\' argument."""'}, {}), '(\n "No NLU data given. Please provide NLU data in order to train a Rasa NLU model using the \'--nlu\' argument."\n )', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((453, 8, 457, 9), 'rasa.cli.utils.print_error', 'print_error', ({(454, 12, 456, 57): 'f"""Path \'{nlu_data}\' doesn\'t contain valid NLU data in it. Please verify the data format. The NLU model training will be skipped now."""'}, {}), '(\n f"Path \'{nlu_data}\' doesn\'t contain valid NLU data in it. Please verify the data format. 
The NLU model training will be skipped now."\n )', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((481, 9, 481, 20), 'contextlib.ExitStack', 'ExitStack', ({}, {}), '()', False, 'from contextlib import ExitStack\n'), ((506, 8, 506, 74), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((36, 19, 36, 43), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((211, 18, 211, 44), 'os.path.abspath', 'os.path.abspath', ({(211, 34, 211, 43): 'old_model'}, {}), '(old_model)', False, 'import os\n'), ((237, 8, 242, 9), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((245, 8, 248, 9), 'rasa.cli.utils.print_color', 'print_color', (), '', False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((380, 19, 386, 13), 'rasa.model.package_model', 'model.package_model', (), '', False, 'from rasa import model\n'), ((512, 19, 518, 13), 'rasa.model.package_model', 'model.package_model', (), '', False, 'from rasa import model\n'), ((38, 19, 38, 43), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ({}, {}), '()', False, 'import asyncio\n'), ((39, 12, 39, 40), 'asyncio.set_event_loop', 'asyncio.set_event_loop', ({(39, 35, 39, 39): 'loop'}, {}), '(loop)', False, 'import asyncio\n'), ((87, 59, 87, 77), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((243, 14, 243, 75), 'rasa.model.update_model_with_new_domain', 'model.update_model_with_new_domain', ({(243, 49, 243, 62): 'file_importer', (243, 64, 243, 74): 'train_path'}, {}), '(file_importer, train_path)', False, 'from rasa import model\n'), ((379, 36, 379, 74), 'rasa.model.model_fingerprint', 'model.model_fingerprint', ({(379, 60, 379, 73): 'file_importer'}, {}), '(file_importer)', False, 'from rasa import model\n'), ((510, 36, 510, 74), 'rasa.model.model_fingerprint', 'model.model_fingerprint', ({(510, 60, 510, 73): 'file_importer'}, {}), '(file_importer)', False, 'from rasa import model\n'), ((361, 64, 361, 82), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n'), ((371, 24, 371, 81), 'os.path.join', 'os.path.join', ({(371, 37, 371, 48): '_train_path', (371, 50, 371, 80): 'DEFAULT_CORE_SUBDIRECTORY_NAME'}, {}), '(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME)', False, 'import os\n'), ((490, 64, 490, 82), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ({}, {}), '()', False, 'import tempfile\n')]
Jahidul007/Python-Bootcamp
coding_intereview/1475. Final Prices With a Special Discount in a Shop.py
3c870587465ff66c2c1871c8d3c4eea72463abda
from typing import List  # provided by the LeetCode runtime; added here so the snippet runs standalone


class Solution:
    def finalPrices(self, prices: List[int]) -> List[int]:
        res = []
        for i in range(len(prices)):
            for j in range(i + 1, len(prices)):
                if prices[j] <= prices[i]:
                    # First later item that is cheaper or equal acts as the discount.
                    res.append(prices[i] - prices[j])
                    break
                if j == len(prices) - 1:
                    # No discounting item found after i: keep the full price.
                    res.append(prices[i])
        # The last item can never be discounted.
        res.append(prices[-1])
        return res
[]
timgates42/denite.nvim
rplugin/python3/denite/ui/default.py
12a9b5456f5a4600afeb0ba284ce1098bd35e501
# ============================================================================ # FILE: default.py # AUTHOR: Shougo Matsushita <Shougo.Matsu at gmail.com> # License: MIT license # ============================================================================ import re import typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class Default(object): @property def is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = [] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr = -1 self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self, action_name: str, command: str = '', is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite or not candidates or not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '': self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False 
def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\[Command Line\]$', self._vim.current.buffer.name): # Ignore command line window. return resume = self._initialized and context['resume'] if resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not sources: # Ignore empty sources. error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead of "current.window.options" # "current.window.options" changes global value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired after set filetype option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map("auto_action")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split != 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no': command = self._get_direction() command += ' vsplit' if split == 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical' else '' command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' % ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if 
(self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern != '': 
self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\[\]\\^-])', r'\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '[' + self._context['path'] + ']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr = "printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( "%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} " + " %{denite#get_status('path')}%*" + "%{" + linenr + "}%*") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( "%#deniteInput#%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} %=" + "%#deniteStatusLinePath# %{denite#get_status('path')}%*" + "%#deniteStatusLineNumber#%{" + linenr + "}%*")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + ' 
'.join(terms).replace('\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split = self._context['split'] if (split == 'no' or split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row = wincol if split == 'floating': if self._context['auto_resize'] and row > 1: row += self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0 if wincol == 1 else row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\+\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to the previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! 
link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else [] return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None: if key in self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may be removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use floating window if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], })
[((177, 11, 177, 73), 're.search', 're.search', ({(177, 21, 177, 41): '"""\\\\[Command Line\\\\]$"""', (177, 43, 177, 72): 'self._vim.current.buffer.name'}, {}), "('\\\\[Command Line\\\\]$', self._vim.current.buffer.name)", False, 'import re\n'), ((814, 8, 814, 29), 'denite.util.clearmatch', 'clearmatch', ({(814, 19, 814, 28): 'self._vim'}, {}), '(self._vim)', False, 'from denite.util import echo, error, clearmatch, regex_convert_py_vim\n'), ((64, 27, 64, 48), 'denite.parent.SyncParent', 'SyncParent', ({(64, 38, 64, 47): 'self._vim'}, {}), '(self._vim)', False, 'from denite.parent import SyncParent\n'), ((227, 43, 227, 73), 'denite.ui.map.do_map', 'do_map', ({(227, 50, 227, 54): 'self', (227, 56, 227, 68): '"""quick_move"""', (227, 70, 227, 72): '[]'}, {}), "(self, 'quick_move', [])", False, 'from denite.ui.map import do_map\n'), ((231, 12, 231, 50), 'denite.ui.map.do_map', 'do_map', ({(231, 19, 231, 23): 'self', (231, 25, 231, 45): '"""open_filter_buffer"""', (231, 47, 231, 49): '[]'}, {}), "(self, 'open_filter_buffer', [])", False, 'from denite.ui.map import do_map\n'), ((715, 13, 715, 60), 're.match', 're.match', ({(715, 22, 715, 30): '"""\\\\+\\\\d+"""', (715, 32, 715, 59): "self._context['cursor_pos']"}, {}), "('\\\\+\\\\d+', self._context['cursor_pos'])", False, 'import re\n'), ((209, 16, 209, 49), 'denite.util.error', 'error', ({(209, 22, 209, 31): 'self._vim', (209, 33, 209, 48): '"""Empty sources"""'}, {}), "(self._vim, 'Empty sources')", False, 'from denite.util import echo, error, clearmatch, regex_convert_py_vim\n'), ((610, 29, 610, 58), 're.search', 're.search', ({(610, 39, 610, 51): '"""[^a-zA-Z]"""', (610, 53, 610, 57): 'name'}, {}), "('[^a-zA-Z]', name)", False, 'import re\n'), ((609, 26, 609, 69), 're.sub', 're.sub', ({(609, 33, 609, 55): '"""([a-zA-Z])[a-zA-Z]+"""', (609, 57, 609, 62): '"""\\\\1"""', (609, 64, 609, 68): 'name'}, {}), "('([a-zA-Z])[a-zA-Z]+', '\\\\1', name)", False, 'import re\n'), ((718, 13, 718, 59), 're.match', 're.match', ({(718, 22, 718, 29): '"""-\\\\d+"""', (718, 31, 718, 58): "self._context['cursor_pos']"}, {}), "('-\\\\d+', self._context['cursor_pos'])", False, 'import re\n'), ((517, 28, 517, 71), 'denite.util.regex_convert_py_vim', 'regex_convert_py_vim', ({(517, 49, 517, 70): 'self._matched_pattern'}, {}), '(self._matched_pattern)', False, 'from denite.util import echo, error, clearmatch, regex_convert_py_vim\n')]
yuanz271/PyDSTool
PyDSTool/core/context_managers.py
886c143cdd192aea204285f3a1cb4968c763c646
# -*- coding: utf-8 -*- """Context managers implemented for (mostly) internal use""" import contextlib import functools from io import UnsupportedOperation import os import sys __all__ = ["RedirectStdout", "RedirectStderr"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode="w"): """ A context manager to temporarily redirect stdout or stderr Originally by Marc Abramowitz, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) """ oldstdchannel = None dest_file = None try: if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected, None, "")
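
# Usage sketch (illustrative only; "captured.txt" is an arbitrary placeholder
# path, not something this module defines). RedirectStdout temporarily routes
# writes to stdout into a file; RedirectNoOp is the matching no-op manager.
if __name__ == "__main__":
    with RedirectStdout("captured.txt"):
        print("this line ends up in captured.txt, not on the console")
    with RedirectNoOp():
        print("this line is printed normally; no redirection happens")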
[((43, 17, 43, 70), 'functools.partial', 'functools.partial', ({(43, 35, 43, 57): '_stdchannel_redirected', (43, 59, 43, 69): 'sys.stdout'}, {}), '(_stdchannel_redirected, sys.stdout)', False, 'import functools\n'), ((44, 17, 44, 70), 'functools.partial', 'functools.partial', ({(44, 35, 44, 57): '_stdchannel_redirected', (44, 59, 44, 69): 'sys.stderr'}, {}), '(_stdchannel_redirected, sys.stderr)', False, 'import functools\n'), ((45, 15, 45, 66), 'functools.partial', 'functools.partial', ({(45, 33, 45, 55): '_stdchannel_redirected', (45, 57, 45, 61): 'None', (45, 63, 45, 65): '""""""'}, {}), "(_stdchannel_redirected, None, '')", False, 'import functools\n')]
Muzzy73/pos_kiosk
pos_kiosk/hooks.py
1ed42cfaeb15f009293b76d05dd85bd322b42f03
# -*- coding: utf-8 -*- from __future__ import unicode_literals from . import __version__ as app_version app_name = "pos_kiosk" app_title = "Pos Kiosk" app_publisher = "9t9it" app_description = "Kiosk App" app_icon = "octicon octicon-file-directory" app_color = "grey" app_email = "[email protected]" app_license = "MIT" # Includes in <head> # ------------------ # include js, css files in header of desk.html # app_include_css = "/assets/pos_kiosk/css/pos_kiosk.css" # app_include_js = "/assets/pos_kiosk/js/pos_kiosk.js" # include js, css files in header of web template # web_include_css = "/assets/pos_kiosk/css/pos_kiosk.css" # web_include_js = "/assets/pos_kiosk/js/pos_kiosk.js" # include js in page # page_js = {"page" : "public/js/file.js"} # page_js = { # "kiosk": ["public/js/pos_page_js.js", "public/js/includes/number_to_words.js"] # } # include js in doctype views # doctype_js = {"doctype" : "public/js/doctype.js"} # doctype_list_js = {"doctype" : "public/js/doctype_list.js"} # doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"} # doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"} fixtures = [ { "doctype": "Custom Field", "filters": [ [ "name", "in", [ "Sales Invoice Item-pos_kiosk", "Mode of Payment-logo" ] ] ] } ] # Home Pages # ---------- # application home page (will override Website Settings) # home_page = "login" # website user home page (by Role) # role_home_page = { # "Role": "home_page" # } # Website user home page (by function) # get_website_user_home_page = "pos_kiosk.utils.get_home_page" # Generators # ---------- # automatically create page for each record of this doctype # website_generators = ["Web Page"] # Installation # ------------ # before_install = "pos_kiosk.install.before_install" # after_install = "pos_kiosk.install.after_install" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = "pos_kiosk.notifications.get_notification_config" # Permissions # ----------- # Permissions evaluated in scripted ways # permission_query_conditions = { # "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions", # } # # has_permission = { # "Event": "frappe.desk.doctype.event.event.has_permission", # } # Document Events # --------------- # Hook on document methods and events # doc_events = { # "*": { # "on_update": "method", # "on_cancel": "method", # "on_trash": "method" # } # } # Scheduled Tasks # --------------- # scheduler_events = { # "all": [ # "pos_kiosk.tasks.all" # ], # "daily": [ # "pos_kiosk.tasks.daily" # ], # "hourly": [ # "pos_kiosk.tasks.hourly" # ], # "weekly": [ # "pos_kiosk.tasks.weekly" # ] # "monthly": [ # "pos_kiosk.tasks.monthly" # ] # } # Testing # ------- # before_tests = "pos_kiosk.install.before_tests" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { # "pos_bahrain.api.get_item_details.get_item_details": "pos_kiosk.api.item.get_item_details" # noqa # }
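
# Usage note (illustrative, not part of the generated hooks template): the
# `fixtures` entry above limits fixture export to the two listed Custom Field
# records, e.g. when running `bench export-fixtures`; sites that install the
# app then get those fields re-created during `bench migrate`.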
[]
gcouti/pypagAI
pypagai/models/model_lstm.py
d08fac95361dcc036d890a88cb86ce090322a612
from keras import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): """ Use a simple lstm neural network """ @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): """ Use a simple lstm neural network """ @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): """ Use a simple lstm neural network """ @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
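

# Wiring sketch for the SimpleLSTM graph (illustrative only; the sizes below
# are placeholders, not values taken from pypagai's configuration).
if __name__ == "__main__":
    story_maxlen, query_maxlen, vocab_size, hidden = 20, 5, 30, 32
    story = Input((story_maxlen,), name='story')           # (batch, 20)
    question = Input((query_maxlen,), name='question')     # (batch, 5)
    x = concatenate([story, question])                     # (batch, 25)
    x = Reshape((1, story_maxlen + query_maxlen))(x)       # (batch, 1, 25)
    x = Permute((2, 1))(x)                                 # (batch, 25, 1): one scalar feature per step
    x = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(x)    # (batch, 32)
    out = Dense(vocab_size, activation='softmax')(x)       # (batch, 30): softmax over answer vocabulary
    demo = Model(inputs=[story, question], outputs=out)
    demo.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    demo.summary()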
[((14, 17, 14, 44), 'pypagai.models.base.KerasModel.default_config', 'KerasModel.default_config', ({}, {}), '()', False, 'from pypagai.models.base import KerasModel\n'), ((25, 16, 25, 59), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((26, 19, 26, 65), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((28, 15, 28, 46), 'keras.layers.concatenate', 'concatenate', ({(28, 27, 28, 44): '[story, question]'}, {}), '([story, question])', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((35, 22, 35, 71), 'keras.Model', 'Model', (), '', False, 'from keras import Model, Input\n'), ((46, 17, 46, 44), 'pypagai.models.base.KerasModel.default_config', 'KerasModel.default_config', ({}, {}), '()', False, 'from pypagai.models.base import KerasModel\n'), ((58, 16, 58, 59), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((59, 19, 59, 65), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((67, 15, 67, 59), 'keras.layers.concatenate', 'concatenate', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((72, 22, 72, 71), 'keras.Model', 'Model', (), '', False, 'from keras import Model, Input\n'), ((83, 17, 83, 44), 'pypagai.models.base.KerasModel.default_config', 'KerasModel.default_config', ({}, {}), '()', False, 'from pypagai.models.base import KerasModel\n'), ((95, 16, 95, 59), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((96, 19, 96, 65), 'keras.Input', 'Input', (), '', False, 'from keras import Model, Input\n'), ((110, 15, 110, 59), 'keras.layers.concatenate', 'concatenate', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((115, 22, 115, 71), 'keras.Model', 'Model', (), '', False, 'from keras import Model, Input\n'), ((30, 15, 30, 30), 'keras.layers.Permute', 'Permute', ({(30, 23, 30, 29): '(2, 1)'}, {}), '((2, 1))', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((32, 19, 32, 67), 'keras.layers.LSTM', 'LSTM', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((33, 19, 33, 64), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((61, 19, 61, 50), 'keras.layers.Embedding', 'Embedding', ({(61, 29, 61, 45): 'self._vocab_size', (61, 47, 61, 49): '64'}, {}), '(self._vocab_size, 64)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((62, 19, 62, 31), 'keras.layers.Dropout', 'Dropout', ({(62, 27, 62, 30): '0.3'}, {}), '(0.3)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((64, 22, 64, 53), 'keras.layers.Embedding', 'Embedding', ({(64, 32, 64, 48): 'self._vocab_size', (64, 50, 64, 52): '64'}, {}), '(self._vocab_size, 64)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((65, 22, 65, 34), 'keras.layers.Dropout', 'Dropout', ({(65, 30, 65, 33): '0.3'}, {}), '(0.3)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, 
Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((69, 19, 69, 67), 'keras.layers.LSTM', 'LSTM', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((70, 19, 70, 64), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((98, 19, 98, 50), 'keras.layers.Embedding', 'Embedding', ({(98, 29, 98, 45): 'self._vocab_size', (98, 47, 98, 49): '64'}, {}), '(self._vocab_size, 64)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((99, 19, 99, 55), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((100, 19, 100, 55), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((101, 19, 101, 55), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((104, 22, 104, 53), 'keras.layers.Embedding', 'Embedding', ({(104, 32, 104, 48): 'self._vocab_size', (104, 50, 104, 52): '64'}, {}), '(self._vocab_size, 64)', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((105, 22, 105, 58), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((106, 22, 106, 58), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((107, 22, 107, 58), 'keras.layers.Convolution1D', 'Convolution1D', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((112, 19, 112, 67), 'keras.layers.LSTM', 'LSTM', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((113, 19, 113, 64), 'keras.layers.Dense', 'Dense', (), '', False, 'from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten\n'), ((36, 38, 36, 51), 'keras.optimizers.Adam', 'Adam', (), '', False, 'from keras.optimizers import Adam\n'), ((73, 38, 73, 51), 'keras.optimizers.Adam', 'Adam', (), '', False, 'from keras.optimizers import Adam\n'), ((116, 38, 116, 51), 'keras.optimizers.Adam', 'Adam', (), '', False, 'from keras.optimizers import Adam\n')]
joelouismarino/variational_rl
lib/variables/latent_variables/__init__.py
11dc14bfb56f3ebbfccd5de206b78712a8039a9a
from .fully_connected import FullyConnectedLatentVariable from .convolutional import ConvolutionalLatentVariable
[]
lpj0822/image_point_cloud_det
easyai/model/backbone/cls/pnasnet.py
7b20e2f42f3f2ff4881485da58ad188a1f0d0e0f
#!/usr/bin/env python # -*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list = [] for block in self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB) return model
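

# Usage sketch (illustrative only; the 32x32 input size and the assumption that
# BaseBackbone follows torch.nn.Module conventions are not stated by this file).
if __name__ == "__main__":
    import torch
    model = pnasnet_A(data_channel=3)
    outputs = model(torch.randn(1, 3, 32, 32))   # forward() returns one tensor per registered block
    print(len(outputs), outputs[-1].shape)       # final feature map is downsampled twice (stride 2)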
[((39, 17, 46, 75), 'easyai.model.base_block.utility.utility_block.ConvBNActivationBlock', 'ConvBNActivationBlock', (), '', False, 'from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock\n')]
cugxy/map_download
map_download/cmd/TerrainDownloader.py
02142b33edb2bc163f7ae971f443efe84c13e029
# -*- coding: utf-8 -*- # coding=utf-8 import json import os import math import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0 url = "https://api.cesium.com/v1/assets/1/endpoint" while True: if request_count > 4: break try: request_count += 1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except Exception as e: resp = None time.sleep(3) if resp is None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = "https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url = '' access_token = get_access_token(self.token) if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4: break try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp = None time.sleep(3) requre_count += 1 if resp is None: return -1 if resp.status_code != 200: return -1 try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { "attribution": "© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and " "information funded by the European Union - EU-DEM layers", "available": [ [ { "endX": 1, "endY": 0, "startX": 0, "startY": 0 } ], [ { "endX": 3, "endY": 1, "startX": 0, "startY": 0 } ], [ { "endX": 7, "endY": 3, "startX": 0, "startY": 0 } ], [ { "endX": 15, "endY": 7, "startX": 0, "startY": 0 } ], [ { "endX": 31, "endY": 15, "startX": 0, "startY": 0 } ], [ { "endX": 63, "endY": 31, "startX": 0, "startY": 0 } ], [ { "endX": 127, "endY": 63, "startX": 0, "startY": 0 } ], [ { "endX": 255, "endY": 127, "startX": 0, "startY": 0 } ], [ { "endX": 511, "endY": 255, "startX": 0, "startY": 0 } ], [ { "endX": 1023, "endY": 511, "startX": 0, "startY": 0 } ], [ { "endX": 2047, "endY": 1023, "startX": 0, "startY": 0 } ], [ 
{ "endX": 4095, "endY": 2047, "startX": 0, "startY": 0 } ], [ { "endX": 8191, "endY": 4095, "startX": 0, "startY": 0 } ], [ { "endX": 16383, "endY": 8191, "startX": 0, "startY": 0 } ], [ { "endX": 32767, "endY": 16383, "startX": 0, "startY": 0 } ] ], "bounds": [-180, -90, 180, 90, ], "description": "STK World Terrain Premium Tileset, v1.3. 10m - 30m resolution CONUS, 30m resolution " "SRTM between 60N and 60S, 30m Europe. Minimum global coverage of 1000m.", "extensions": ["watermask", "vertexnormals", "octvertexnormals", ], "format": "quantized-mesh-1.0", "maxzoom": 13, "minzoom": 0, "name": "world", "projection": "EPSG:4326", "scheme": "tms", "tilejson": "2.1.0", "tiles": ["{z}/{x}/{y}.terrain?v={version}", ], "version": "1.31376.0" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e: if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger is not None: self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if 0: accessToken = get_access_token() pass
[((52, 11, 52, 36), 'os.path.exists', 'os.path.exists', ({(52, 26, 52, 35): 'file_path'}, {}), '(file_path)', False, 'import os\n'), ((97, 23, 97, 73), 'map_download.cmd.BaseDownloader.latlng2tile_terrain', 'latlng2tile_terrain', ({(97, 43, 97, 55): 'bbox.min_lat', (97, 57, 97, 69): 'bbox.min_lng', (97, 71, 97, 72): 'z'}, {}), '(bbox.min_lat, bbox.min_lng, z)', False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((98, 23, 98, 73), 'map_download.cmd.BaseDownloader.latlng2tile_terrain', 'latlng2tile_terrain', ({(98, 43, 98, 55): 'bbox.max_lat', (98, 57, 98, 69): 'bbox.max_lng', (98, 71, 98, 72): 'z'}, {}), '(bbox.max_lat, bbox.max_lng, z)', False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((289, 17, 289, 42), 'logging.getLogger', 'logging.getLogger', ({(289, 35, 289, 41): '"""down"""'}, {}), "('down')", False, 'import logging\n'), ((23, 19, 23, 61), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((55, 20, 55, 46), 'os.path.dirname', 'os.path.dirname', ({(55, 36, 55, 45): 'file_path'}, {}), '(file_path)', False, 'import os\n'), ((99, 15, 99, 32), 'math.floor', 'math.floor', ({(99, 26, 99, 31): 'min_x'}, {}), '(min_x)', False, 'import math\n'), ((99, 34, 99, 51), 'math.floor', 'math.floor', ({(99, 45, 99, 50): 'min_y'}, {}), '(min_y)', False, 'import math\n'), ((242, 19, 242, 57), 'os.path.join', 'os.path.join', ({(242, 32, 242, 45): 'self.root_dir', (242, 47, 242, 56): '"""Terrain"""'}, {}), "(self.root_dir, 'Terrain')", False, 'import os\n'), ((243, 12, 243, 44), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((244, 29, 244, 61), 'os.path.join', 'os.path.join', ({(244, 42, 244, 46): '_dir', (244, 48, 244, 60): '"""layer.json"""'}, {}), "(_dir, 'layer.json')", False, 'import os\n'), ((292, 24, 292, 70), 'logging.Formatter', 'logging.Formatter', ({(292, 42, 292, 69): '"""%(levelname)s-%(message)s"""'}, {}), "('%(levelname)s-%(message)s')", False, 'import logging\n'), ((293, 19, 293, 42), 'logging.StreamHandler', 'logging.StreamHandler', ({}, {}), '()', False, 'import logging\n'), ((294, 23, 294, 53), 'os.path.join', 'os.path.join', ({(294, 36, 294, 40): 'root', (294, 42, 294, 52): '"""down.log"""'}, {}), "(root, 'down.log')", False, 'import os\n'), ((295, 24, 295, 53), 'logging.FileHandler', 'logging.FileHandler', ({(295, 44, 295, 52): 'log_file'}, {}), '(log_file)', False, 'import logging\n'), ((306, 19, 306, 85), 'map_download.cmd.BaseDownloader.BoundBox', 'BoundBox', ({(306, 28, 306, 35): 'max_lat', (306, 37, 306, 44): 'max_lng', (306, 46, 306, 53): 'min_lat', (306, 55, 306, 62): 'min_lng', (306, 64, 306, 74): 'start_zoom', (306, 76, 306, 84): 'end_zoom'}, {}), '(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom)', False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((309, 12, 309, 29), 'time.sleep', 'time.sleep', ({(309, 23, 309, 28): '(10000)'}, {}), '(10000)', False, 'import time\n'), ((29, 12, 29, 25), 'time.sleep', 'time.sleep', ({(29, 23, 29, 24): '(3)'}, {}), '(3)', False, 'import time\n'), ((67, 23, 67, 79), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((99, 53, 99, 69), 'math.ceil', 'math.ceil', ({(99, 63, 99, 68): 'max_x'}, {}), '(max_x)', False, 'import math\n'), ((99, 75, 99, 91), 'math.ceil', 'math.ceil', ({(99, 85, 99, 90): 'max_y'}, {}), '(max_y)', False, 'import math\n'), ((246, 16, 246, 
39), 'json.dump', 'json.dump', ({(246, 26, 246, 35): 'metadatas', (246, 37, 246, 38): 'f'}, {}), '(metadatas, f)', False, 'import json\n'), ((71, 16, 71, 29), 'time.sleep', 'time.sleep', ({(71, 27, 71, 28): '(3)'}, {}), '(3)', False, 'import time\n'), ((263, 24, 263, 40), 'time.sleep', 'time.sleep', ({(263, 35, 263, 39): '(0.01)'}, {}), '(0.01)', False, 'import time\n')]
dineshsonachalam/kubernetes_asyncio
kubernetes_asyncio/client/api/rbac_authorization_v1_api.py
d57e9e9be11f6789e1ce8d5b161acb64d29acf35
# coding: utf-8 """ Kubernetes No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: v1.12.4 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from kubernetes_asyncio.client.api_client import ApiClient class RbacAuthorizationV1Api(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_cluster_role(self, body, **kwargs): # noqa: E501 """create_cluster_role # noqa: E501 create a ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_role(body, async_req=True) >>> result = thread.get() :param async_req bool :param V1ClusterRole body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_cluster_role_with_http_info(body, **kwargs) # noqa: E501 else: (data) = self.create_cluster_role_with_http_info(body, **kwargs) # noqa: E501 return data def create_cluster_role_with_http_info(self, body, **kwargs): # noqa: E501 """create_cluster_role # noqa: E501 create a ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_role_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool :param V1ClusterRole body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. 
""" all_params = ['body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_cluster_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `create_cluster_role`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRole', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def create_cluster_role_binding(self, body, **kwargs): # noqa: E501 """create_cluster_role_binding # noqa: E501 create a ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_role_binding(body, async_req=True) >>> result = thread.get() :param async_req bool :param V1ClusterRoleBinding body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_cluster_role_binding_with_http_info(body, **kwargs) # noqa: E501 else: (data) = self.create_cluster_role_binding_with_http_info(body, **kwargs) # noqa: E501 return data def create_cluster_role_binding_with_http_info(self, body, **kwargs): # noqa: E501 """create_cluster_role_binding # noqa: E501 create a ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_cluster_role_binding_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool :param V1ClusterRoleBinding body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. """ all_params = ['body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_cluster_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `create_cluster_role_binding`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def create_namespaced_role(self, namespace, body, **kwargs): # noqa: E501 """create_namespaced_role # noqa: E501 create a Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. 
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_namespaced_role_with_http_info(namespace, body, **kwargs) # noqa: E501 else: (data) = self.create_namespaced_role_with_http_info(namespace, body, **kwargs) # noqa: E501 return data def create_namespaced_role_with_http_info(self, namespace, body, **kwargs): # noqa: E501 """create_namespaced_role # noqa: E501 create a Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role_with_http_info(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. """ all_params = ['namespace', 'body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = 
['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Role', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def create_namespaced_role_binding(self, namespace, body, **kwargs): # noqa: E501 """create_namespaced_role_binding # noqa: E501 create a RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role_binding(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1RoleBinding body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs) # noqa: E501 else: (data) = self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs) # noqa: E501 return data def create_namespaced_role_binding_with_http_info(self, namespace, body, **kwargs): # noqa: E501 """create_namespaced_role_binding # noqa: E501 create a RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_namespaced_role_binding_with_http_info(namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1RoleBinding body: (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. 
""" all_params = ['namespace', 'body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_cluster_role(self, name, body, **kwargs): # noqa: E501 """delete_cluster_role # noqa: E501 delete a ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_role(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. 
zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.delete_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 return data def delete_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501 """delete_cluster_role # noqa: E501 delete a ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_role_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_cluster_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 if 'grace_period_seconds' in params: query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501 if 'orphan_dependents' in params: query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501 if 'propagation_policy' in params: query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_cluster_role_binding(self, name, body, **kwargs): # noqa: E501 """delete_cluster_role_binding # noqa: E501 delete a ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_role_binding(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.delete_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 return data def delete_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501 """delete_cluster_role_binding # noqa: E501 delete a ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_role_binding_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. 
Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_cluster_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 if 'grace_period_seconds' in params: query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501 if 'orphan_dependents' in params: query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501 if 'propagation_policy' in params: query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_collection_cluster_role(self, **kwargs): # noqa: E501 """delete_collection_cluster_role # noqa: E501 delete collection of ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_cluster_role(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. 
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_cluster_role_with_http_info(**kwargs) # noqa: E501 else: (data) = self.delete_collection_cluster_role_with_http_info(**kwargs) # noqa: E501 return data def delete_collection_cluster_role_with_http_info(self, **kwargs): # noqa: E501 """delete_collection_cluster_role # noqa: E501 delete collection of ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_cluster_role_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_collection_cluster_role" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles', 
'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_collection_cluster_role_binding(self, **kwargs): # noqa: E501 """delete_collection_cluster_role_binding # noqa: E501 delete collection of ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_cluster_role_binding(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_cluster_role_binding_with_http_info(**kwargs) # noqa: E501 else: (data) = self.delete_collection_cluster_role_binding_with_http_info(**kwargs) # noqa: E501 return data def delete_collection_cluster_role_binding_with_http_info(self, **kwargs): # noqa: E501 """delete_collection_cluster_role_binding # noqa: E501 delete collection of ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_cluster_role_binding_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. 
Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. 
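        A possible usage sketch; it assumes a kubeconfig-configured client and that the label
        'app=demo' (a placeholder) is only carried by ClusterRoleBindings that are safe to
        remove. Note that the collection delete in this generated client takes no body argument.
        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> status = api.delete_collection_cluster_role_binding(
        ...     label_selector='app=demo',  # placeholder selector
        ...     timeout_seconds=30)
        >>> status.status  # V1Status, e.g. 'Success'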
""" all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_collection_cluster_role_binding" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_collection_namespaced_role(self, namespace, **kwargs): # noqa: E501 """delete_collection_namespaced_role # noqa: E501 delete collection of Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_role(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. 
:return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501 else: (data) = self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501 return data def delete_collection_namespaced_role_with_http_info(self, namespace, **kwargs): # noqa: E501 """delete_collection_namespaced_role # noqa: E501 delete collection of Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_role_with_http_info(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_collection_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 
'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_collection_namespaced_role_binding(self, namespace, **kwargs): # noqa: E501 """delete_collection_namespaced_role_binding # noqa: E501 delete collection of RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_role_binding(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. 
Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501 else: (data) = self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501 return data def delete_collection_namespaced_role_binding_with_http_info(self, namespace, **kwargs): # noqa: E501 """delete_collection_namespaced_role_binding # noqa: E501 delete collection of RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_namespaced_role_binding_with_http_info(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. 
Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. 
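        A possible usage sketch for the namespaced variant; the namespace 'team-a' and the label
        'cleanup=true' are placeholders, and a kubeconfig-configured client is assumed.
        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> status = api.delete_collection_namespaced_role_binding(
        ...     'team-a', label_selector='cleanup=true')  # placeholder namespace and selector
        >>> status.status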
""" all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_collection_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_namespaced_role(self, name, namespace, body, **kwargs): # noqa: E501 """delete_namespaced_role # noqa: E501 delete a Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_role(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. 
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def delete_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """delete_namespaced_role # noqa: E501 delete a Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_role_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. 
The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 if 'grace_period_seconds' in params: query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501 if 'orphan_dependents' in params: query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501 if 'propagation_policy' in params: query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501 """delete_namespaced_role_binding # noqa: 
E501 delete a RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_role_binding(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def delete_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """delete_namespaced_role_binding # noqa: E501 delete a RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1DeleteOptions body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. 
If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 if 'grace_period_seconds' in params: query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501 if 'orphan_dependents' in params: query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501 if 'propagation_policy' in params: query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: 
E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Status', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_api_resources(self, **kwargs): # noqa: E501 """get_api_resources # noqa: E501 get available resources # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_api_resources(async_req=True) >>> result = thread.get() :param async_req bool :return: V1APIResourceList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_api_resources_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_api_resources_with_http_info(**kwargs) # noqa: E501 return data def get_api_resources_with_http_info(self, **kwargs): # noqa: E501 """get_api_resources # noqa: E501 get available resources # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_api_resources_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: V1APIResourceList If the method is called asynchronously, returns the request thread. """ all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_api_resources" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1APIResourceList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_cluster_role(self, **kwargs): # noqa: E501 """list_cluster_role # noqa: E501 list or watch objects of kind ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_role(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ClusterRoleList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_cluster_role_with_http_info(**kwargs) # noqa: E501 else: (data) = self.list_cluster_role_with_http_info(**kwargs) # noqa: E501 return data def list_cluster_role_with_http_info(self, **kwargs): # noqa: E501 """list_cluster_role # noqa: E501 list or watch objects of kind ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_role_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. 
If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ClusterRoleList If the method is called asynchronously, returns the request thread. """ all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_cluster_role" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_cluster_role_binding(self, **kwargs): # noqa: E501 """list_cluster_role_binding # noqa: E501 list or watch objects of kind ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_role_binding(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ClusterRoleBindingList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_cluster_role_binding_with_http_info(**kwargs) # noqa: E501 else: (data) = self.list_cluster_role_binding_with_http_info(**kwargs) # noqa: E501 return data def list_cluster_role_binding_with_http_info(self, **kwargs): # noqa: E501 """list_cluster_role_binding # noqa: E501 list or watch objects of kind ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_role_binding_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. 
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ClusterRoleBindingList If the method is called asynchronously, returns the request thread. 
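        Illustrative synchronous usage sketch (not generated output; it calls the public
        `list_cluster_role_binding` wrapper and assumes the `kubernetes` package with a
        kubeconfig already loaded):

        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> bindings = api.list_cluster_role_binding(limit=5)
        >>> for item in bindings.items:
        ...     print(item.metadata.name, item.role_ref.name)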
""" all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_cluster_role_binding" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleBindingList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_namespaced_role(self, namespace, **kwargs): # noqa: E501 """list_namespaced_role # noqa: E501 list or watch objects of kind Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_role(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. 
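        Illustrative synchronous usage sketch (not generated output; the namespace and
        label selector values are placeholders, and a loaded kubeconfig is assumed):

        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> roles = api.list_namespaced_role('kube-system', label_selector='app=example')
        >>> print([r.metadata.name for r in roles.items])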
:return: V1RoleList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501 else: (data) = self.list_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501 return data def list_namespaced_role_with_http_info(self, namespace, **kwargs): # noqa: E501 """list_namespaced_role # noqa: E501 list or watch objects of kind Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_role_with_http_info(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. 
This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleList If the method is called asynchronously, returns the request thread. """ all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting 
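        # NOTE (assumption, not part of the generated code): the 'BearerToken' auth setting
        # below is typically resolved by the ApiClient from
        # configuration.api_key['authorization'], i.e. the kubeconfig or service-account
        # bearer token sent in the Authorization header.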
        auth_settings = ['BearerToken']  # noqa: E501

        return self.api_client.call_api(
            '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1RoleList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def list_namespaced_role_binding(self, namespace, **kwargs):  # noqa: E501
        """list_namespaced_role_binding  # noqa: E501

        list or watch objects of kind RoleBinding  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_namespaced_role_binding(namespace, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param bool include_uninitialized: If true, partially initialized resources are included in the response.
        :param str pretty: If 'true', then the output is pretty printed.
        :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
        :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
        :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true.
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501 else: (data) = self.list_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501 return data def list_namespaced_role_binding_with_http_info(self, namespace, **kwargs): # noqa: E501 """list_namespaced_role_binding # noqa: E501 list or watch objects of kind RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_role_binding_with_http_info(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". 
This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread. 
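        Illustrative pagination sketch (not generated output; it calls the public
        `list_namespaced_role_binding` wrapper, the 'demo' namespace is a placeholder, a
        loaded kubeconfig is assumed, and the continue token is exposed as `_continue` on
        the list metadata in this client):

        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> page = api.list_namespaced_role_binding('demo', limit=50)
        >>> while True:
        ...     for rb in page.items:
        ...         print(rb.metadata.name)
        ...     token = page.metadata._continue
        ...     if not token:
        ...         break
        ...     page = api.list_namespaced_role_binding('demo', limit=50, _continue=token)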
""" all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBindingList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_role_binding_for_all_namespaces(self, **kwargs): # noqa: E501 """list_role_binding_for_all_namespaces # noqa: E501 list or watch objects of kind RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_binding_for_all_namespaces(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. 
Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. 
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) # noqa: E501 else: (data) = self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) # noqa: E501 return data def list_role_binding_for_all_namespaces_with_http_info(self, **kwargs): # noqa: E501 """list_role_binding_for_all_namespaces # noqa: E501 list or watch objects of kind RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_binding_for_all_namespaces_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleBindingList If the method is called asynchronously, returns the request thread. """ all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_role_binding_for_all_namespaces" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: 
E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/rolebindings', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBindingList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_role_for_all_namespaces(self, **kwargs): # noqa: E501 """list_role_for_all_namespaces # noqa: E501 list or watch objects of kind Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_for_all_namespaces(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. 
The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_role_for_all_namespaces_with_http_info(**kwargs) # noqa: E501 else: (data) = self.list_role_for_all_namespaces_with_http_info(**kwargs) # noqa: E501 return data def list_role_for_all_namespaces_with_http_info(self, **kwargs): # noqa: E501 """list_role_for_all_namespaces # noqa: E501 list or watch objects of kind Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_role_for_all_namespaces_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. 
:param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str pretty: If 'true', then the output is pretty printed. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1RoleList If the method is called asynchronously, returns the request thread. 
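        Illustrative sketch of a cluster-wide Role listing with the convenience wrapper, assuming a configured kubeconfig; the label selector value is a placeholder:

        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> roles = api.list_role_for_all_namespaces(label_selector='team=platform')  # placeholder selector
        >>> for role in roles.items:
        ...     print(role.metadata.namespace, role.metadata.name)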
""" all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_role_for_all_namespaces" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if '_continue' in params: query_params.append(('continue', params['_continue'])) # noqa: E501 if 'field_selector' in params: query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501 if 'include_uninitialized' in params: query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501 if 'label_selector' in params: query_params.append(('labelSelector', params['label_selector'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'resource_version' in params: query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501 if 'timeout_seconds' in params: query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 if 'watch' in params: query_params.append(('watch', params['watch'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/roles', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def patch_cluster_role(self, name, body, **kwargs): # noqa: E501 """patch_cluster_role # noqa: E501 partially update the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_role(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.patch_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 return data def patch_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501 """patch_cluster_role # noqa: E501 partially update the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_role_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_cluster_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRole', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def patch_cluster_role_binding(self, name, body, **kwargs): # noqa: E501 
"""patch_cluster_role_binding # noqa: E501 partially update the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_role_binding(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.patch_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 return data def patch_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501 """patch_cluster_role_binding # noqa: E501 partially update the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_cluster_role_binding_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_cluster_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def patch_namespaced_role(self, name, namespace, body, **kwargs): # noqa: E501 """patch_namespaced_role # noqa: E501 partially update the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_role(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def patch_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """patch_namespaced_role # noqa: E501 partially update the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_role_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'PATCH', path_params, 
query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Role', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def patch_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501 """patch_namespaced_role_binding # noqa: E501 partially update the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_role_binding(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def patch_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """patch_namespaced_role_binding # noqa: E501 partially update the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. 
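        Illustrative sketch using the convenience wrapper, assuming a configured kubeconfig and an existing RoleBinding; the names are placeholders. The body is a JSON Patch operation list appending a subject:

        >>> from kubernetes import client, config
        >>> config.load_kube_config()
        >>> api = client.RbacAuthorizationV1Api()
        >>> patch = [{'op': 'add', 'path': '/subjects/-',
        ...           'value': {'kind': 'User', 'name': 'jane', 'apiGroup': 'rbac.authorization.k8s.io'}}]  # placeholder subject
        >>> api.patch_namespaced_role_binding('read-pods', 'default', patch)  # placeholder binding name and namespace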
""" all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method patch_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def read_cluster_role(self, name, **kwargs): # noqa: E501 """read_cluster_role # noqa: E501 read the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_cluster_role(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1ClusterRole If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_cluster_role_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.read_cluster_role_with_http_info(name, **kwargs) # noqa: E501 return data def read_cluster_role_with_http_info(self, name, **kwargs): # noqa: E501 """read_cluster_role # noqa: E501 read the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_cluster_role_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1ClusterRole If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'pretty'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method read_cluster_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `read_cluster_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRole', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def read_cluster_role_binding(self, name, **kwargs): # noqa: E501 """read_cluster_role_binding # noqa: E501 read the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_cluster_role_binding(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.read_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501 return data def read_cluster_role_binding_with_http_info(self, name, **kwargs): # noqa: E501 """read_cluster_role_binding # noqa: E501 read the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_cluster_role_binding_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'pretty'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method read_cluster_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `read_cluster_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def read_namespaced_role(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_role # noqa: E501 read the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_role(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1Role If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_role_with_http_info(name, namespace, **kwargs) # noqa: E501 else: (data) = self.read_namespaced_role_with_http_info(name, namespace, **kwargs) # noqa: E501 return data def read_namespaced_role_with_http_info(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_role # noqa: E501 read the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_role_with_http_info(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1Role If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'pretty'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method read_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Role', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def read_namespaced_role_binding(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_role_binding # noqa: E501 read the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_role_binding(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs) # noqa: E501 else: (data) = self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs) # noqa: E501 return data def read_namespaced_role_binding_with_http_info(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_role_binding # noqa: E501 read the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_role_binding_with_http_info(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'pretty'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method read_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def replace_cluster_role(self, name, body, **kwargs): # noqa: E501 """replace_cluster_role # noqa: E501 replace the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_cluster_role(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param V1ClusterRole body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.replace_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501 return data def replace_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501 """replace_cluster_role # noqa: E501 replace the specified ClusterRole # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_cluster_role_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRole (required) :param V1ClusterRole body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRole If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method replace_cluster_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRole', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def replace_cluster_role_binding(self, name, body, **kwargs): # noqa: E501 """replace_cluster_role_binding # noqa: E501 replace the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_cluster_role_binding(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param V1ClusterRoleBinding body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 else: (data) = self.replace_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501 return data def replace_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501 """replace_cluster_role_binding # noqa: E501 replace the specified ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_cluster_role_binding_with_http_info(name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param V1ClusterRoleBinding body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1ClusterRoleBinding If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method replace_cluster_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1ClusterRoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def replace_namespaced_role(self, name, namespace, 
body, **kwargs): # noqa: E501 """replace_namespaced_role # noqa: E501 replace the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_role(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def replace_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """replace_namespaced_role # noqa: E501 replace the specified Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_role_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Role (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1Role body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Role If the method is called asynchronously, returns the request thread. 
""" all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method replace_namespaced_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Role', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def replace_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501 """replace_namespaced_role_binding # noqa: E501 replace the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_role_binding(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1RoleBinding body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. 
Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data def replace_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501 """replace_namespaced_role_binding # noqa: E501 replace the specified RoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.replace_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the RoleBinding (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param V1RoleBinding body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1RoleBinding If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method replace_namespaced_role_binding" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'namespace' is set if ('namespace' not in params or params['namespace'] is None): raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role_binding`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role_binding`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 if 'namespace' in params: path_params['namespace'] = params['namespace'] # noqa: E501 query_params = [] if 'pretty' in params: query_params.append(('pretty', params['pretty'])) # noqa: E501 if 'dry_run' in params: query_params.append(('dryRun', params['dry_run'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['*/*']) # noqa: E501 # Authentication setting 
auth_settings = ['BearerToken'] # noqa: E501 return self.api_client.call_api( '/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1RoleBinding', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
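# ---------------------------------------------------------------------------
# Illustrative usage sketch (an addition, not part of the generated client).
# The enclosing API class name is assumed here (RbacAuthorizationV1Api in the
# upstream kubernetes client); `updated_rules`, the role name and the
# namespace are placeholders.
#
#     from kubernetes_asyncio.client.api_client import ApiClient
#
#     rbac_api = RbacAuthorizationV1Api(ApiClient())   # assumed class name
#
#     # Synchronous-style call, as defined above:
#     role = rbac_api.read_namespaced_role("pod-reader", "default")
#
#     # Or with async_req=True, which returns a thread (see the docstrings):
#     thread = rbac_api.read_namespaced_role("pod-reader", "default", async_req=True)
#     role = thread.get()
#
#     # Replacing a Role re-uses the fetched object as the request body:
#     role.rules = updated_rules
#     rbac_api.replace_namespaced_role("pod-reader", "default", role)
# ---------------------------------------------------------------------------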
[((87, 24, 87, 55), 'six.iteritems', 'six.iteritems', ({(87, 38, 87, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((198, 24, 198, 55), 'six.iteritems', 'six.iteritems', ({(198, 38, 198, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((311, 24, 311, 55), 'six.iteritems', 'six.iteritems', ({(311, 38, 311, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((430, 24, 430, 55), 'six.iteritems', 'six.iteritems', ({(430, 38, 430, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((553, 24, 553, 55), 'six.iteritems', 'six.iteritems', ({(553, 38, 553, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((680, 24, 680, 55), 'six.iteritems', 'six.iteritems', ({(680, 38, 680, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((811, 24, 811, 55), 'six.iteritems', 'six.iteritems', ({(811, 38, 811, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((938, 24, 938, 55), 'six.iteritems', 'six.iteritems', ({(938, 38, 938, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1067, 24, 1067, 55), 'six.iteritems', 'six.iteritems', ({(1067, 38, 1067, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1202, 24, 1202, 55), 'six.iteritems', 'six.iteritems', ({(1202, 38, 1202, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1333, 24, 1333, 55), 'six.iteritems', 'six.iteritems', ({(1333, 38, 1333, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1468, 24, 1468, 55), 'six.iteritems', 'six.iteritems', ({(1468, 38, 1468, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1587, 24, 1587, 55), 'six.iteritems', 'six.iteritems', ({(1587, 38, 1587, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1696, 24, 1696, 55), 'six.iteritems', 'six.iteritems', ({(1696, 38, 1696, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1823, 24, 1823, 55), 'six.iteritems', 'six.iteritems', ({(1823, 38, 1823, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((1952, 24, 1952, 55), 'six.iteritems', 'six.iteritems', ({(1952, 38, 1952, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2087, 24, 2087, 55), 'six.iteritems', 'six.iteritems', ({(2087, 38, 2087, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2220, 24, 2220, 55), 'six.iteritems', 'six.iteritems', ({(2220, 38, 2220, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2347, 24, 2347, 55), 'six.iteritems', 'six.iteritems', ({(2347, 38, 2347, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2464, 24, 2464, 55), 'six.iteritems', 'six.iteritems', ({(2464, 38, 2464, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2579, 24, 2579, 55), 'six.iteritems', 'six.iteritems', ({(2579, 38, 2579, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2696, 24, 2696, 55), 'six.iteritems', 'six.iteritems', ({(2696, 38, 2696, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2819, 24, 2819, 55), 'six.iteritems', 'six.iteritems', ({(2819, 38, 2819, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((2936, 24, 2936, 55), 'six.iteritems', 'six.iteritems', ({(2936, 38, 
2936, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3039, 24, 3039, 55), 'six.iteritems', 'six.iteritems', ({(3039, 38, 3039, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3144, 24, 3144, 55), 'six.iteritems', 'six.iteritems', ({(3144, 38, 3144, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3255, 24, 3255, 55), 'six.iteritems', 'six.iteritems', ({(3255, 38, 3255, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3368, 24, 3368, 55), 'six.iteritems', 'six.iteritems', ({(3368, 38, 3368, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3483, 24, 3483, 55), 'six.iteritems', 'six.iteritems', ({(3483, 38, 3483, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3600, 24, 3600, 55), 'six.iteritems', 'six.iteritems', ({(3600, 38, 3600, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((3723, 24, 3723, 55), 'six.iteritems', 'six.iteritems', ({(3723, 38, 3723, 54): "params['kwargs']"}, {}), "(params['kwargs'])", False, 'import six\n'), ((33, 25, 33, 36), 'kubernetes_asyncio.client.api_client.ApiClient', 'ApiClient', ({}, {}), '()', False, 'from kubernetes_asyncio.client.api_client import ApiClient\n')]
vahini01/electoral_rolls
tools/utils.py
82e42a6ee68844b1c8ac7899e8e7bf7a24e48d44
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri Nov 10 23:28:58 2017 @author: dhingratul """ import urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out and get on with life # If it doesn't download after the timeout period, an exceptions is thrown, and we try again except requests.exceptions.RequestException as e: with open(file_out, "a") as myfile: myfile.write(pdf_url + '\n') filename = mdir + filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): """Check is the PDF valid """ try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception as e: return False
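# ---------------------------------------------------------------------------
# Illustrative usage sketch (an addition, not part of the original module).
# The URL and output paths are placeholders; network access is required, and
# getDriver() additionally needs Chrome/chromedriver installed.
if __name__ == "__main__":
    sample_url = "https://example.com/sample.pdf"  # placeholder URL
    out_dir = "/tmp/"                              # placeholder directory
    out_name = "sample.pdf"
    # Download with wget, then verify the result is a non-empty, readable PDF.
    ok = download_file_W(sample_url, out_dir, out_name)
    if ok and is_valid_pdf(out_dir + out_name):
        print("downloaded a valid PDF")
    else:
        print("download failed or file is not a valid PDF")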
[((38, 4, 38, 48), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ({}, {}), '()', False, 'import requests\n'), ((62, 4, 62, 36), 'wget.download', 'wget.download', ({(62, 18, 62, 25): 'pdf_url', (62, 27, 62, 35): 'filename'}, {}), '(pdf_url, filename)', False, 'import wget\n'), ((71, 13, 71, 31), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ({}, {}), '()', False, 'from selenium import webdriver\n'), ((21, 18, 21, 50), 'ssl._create_unverified_context', 'ssl._create_unverified_context', ({}, {}), '()', False, 'import ssl\n'), ((29, 7, 29, 24), 'os.stat', 'os.stat', ({(29, 15, 29, 23): 'filename'}, {}), '(filename)', False, 'import os\n'), ((41, 16, 41, 63), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((51, 7, 51, 24), 'os.stat', 'os.stat', ({(51, 15, 51, 23): 'filename'}, {}), '(filename)', False, 'import os\n'), ((63, 7, 63, 24), 'os.stat', 'os.stat', ({(63, 15, 63, 23): 'filename'}, {}), '(filename)', False, 'import os\n'), ((80, 18, 80, 34), 'PyPDF2.PdfFileReader', 'PdfFileReader', ({(80, 32, 80, 33): 'f'}, {}), '(f)', False, 'from PyPDF2 import PdfFileReader\n')]
ellencwade/coronavirus-2020
exp/viz_raw_manhattan.py
b71e018deb8df8450b4d88ddbcd6ded6497aa8f9
""" Experiment summary ------------------ Treat each province/state in a country cases over time as a vector, do a simple K-Nearest Neighbor between countries. What country has the most similar trajectory to a given country? Plots similar countries """ import sys sys.path.insert(0, '..') from utils import data import os import sklearn import numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val in regions: df = data.filter_by_attribute( confirmed, "Country/Region", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel("Time (days since Jan 22, 2020)") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png') plt.close() print(region)
[((13, 0, 13, 24), 'sys.path.insert', 'sys.path.insert', ({(13, 16, 13, 17): '(0)', (13, 19, 13, 23): '""".."""'}, {}), "(0, '..')", False, 'import sys\n'), ((22, 0, 22, 32), 'matplotlib.pyplot.style.use', 'plt.style.use', ({(22, 14, 22, 31): '"""fivethirtyeight"""'}, {}), "('fivethirtyeight')", True, 'import matplotlib.pyplot as plt\n'), ((28, 12, 31, 47), 'os.path.join', 'os.path.join', ({(29, 4, 29, 13): 'BASE_PATH', (30, 4, 30, 31): '"""csse_covid_19_time_series"""', (31, 4, 31, 46): '"""time_series_covid19_confirmed_global.csv"""'}, {}), "(BASE_PATH, 'csse_covid_19_time_series',\n 'time_series_covid19_confirmed_global.csv')", False, 'import os\n'), ((32, 12, 32, 41), 'utils.data.load_csv_data', 'data.load_csv_data', ({(32, 31, 32, 40): 'confirmed'}, {}), '(confirmed)', False, 'from utils import data\n'), ((36, 6, 36, 34), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((38, 5, 38, 24), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', ({(38, 18, 38, 23): '"""jet"""'}, {}), "('jet')", True, 'import matplotlib.pyplot as plt\n'), ((43, 12, 43, 59), 'os.path.join', 'os.path.join', ({(43, 25, 43, 42): '"""../exp/results/"""', (43, 44, 43, 58): '"""knn_raw.json"""'}, {}), "('../exp/results/', 'knn_raw.json')", False, 'import os\n'), ((45, 12, 45, 24), 'json.load', 'json.load', ({(45, 22, 45, 23): 'f'}, {}), '(f)', False, 'import json\n'), ((48, 4, 48, 36), 'matplotlib.pyplot.style.use', 'plt.style.use', ({(48, 18, 48, 35): '"""fivethirtyeight"""'}, {}), "('fivethirtyeight')", True, 'import matplotlib.pyplot as plt\n'), ((50, 10, 50, 38), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((52, 9, 52, 28), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', ({(52, 22, 52, 27): '"""jet"""'}, {}), "('jet')", True, 'import matplotlib.pyplot as plt\n'), ((68, 4, 68, 22), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((71, 4, 71, 74), 'matplotlib.pyplot.title', 'plt.title', ({(71, 14, 71, 73): 'f"""Comparing confirmed cases in {region} and {other_region}"""'}, {}), "(f'Comparing confirmed cases in {region} and {other_region}')", True, 'import matplotlib.pyplot as plt\n'), ((72, 4, 72, 54), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(72, 16, 72, 53): 'f"""results/raw_manhattan/{region}.png"""'}, {}), "(f'results/raw_manhattan/{region}.png')", True, 'import matplotlib.pyplot as plt\n'), ((73, 4, 73, 15), 'matplotlib.pyplot.close', 'plt.close', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((57, 13, 58, 45), 'utils.data.filter_by_attribute', 'data.filter_by_attribute', ({(58, 12, 58, 21): 'confirmed', (58, 23, 58, 39): '"""Country/Region"""', (58, 41, 58, 44): 'val'}, {}), "(confirmed, 'Country/Region', val)", False, 'from utils import data\n'), ((59, 24, 59, 58), 'utils.data.get_cases_chronologically', 'data.get_cases_chronologically', ({(59, 55, 59, 57): 'df'}, {}), '(df)', False, 'from utils import data\n')]
steven-lang/rational_activations
rational/mxnet/rationals.py
234623dbb9360c215c430185b09e2237d5186b54
""" Rational Activation Functions for MXNET ======================================= This module allows you to create Rational Neural Networks using Learnable Rational activation functions with MXNET networks. """ import mxnet as mx from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): """ Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated function for initialisation. The different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The degrees of the numerator (P) and denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str): Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1 * x + a_2 * x^2 + ... + a_n * x^n) and `A`: Q(x) = (1 + |b_0 * x| + | b_1 * x^2| + ... + | b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0 * x + b_1 * x^2 + ... + b_m * x^{m + 1}|) `C`: Q(x) = (0.1 + |b_0 + b_1 * x + b_2 * x^2 + ... + b_m * x^m|) `D`: like `B` with noised coefficients b_i Default ``A`` trainable (bool): Whether the weights are trainable, i.e, if they are updated during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block """ def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of weights in numerator and denominator, since we need them during # symbolic execution, but are unable to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees self.version = version self.init_approximation = approx_func # set specified context (currently not happening, since unclear, how and why helpful) # self.device = gpu() if cuda else cpu() # register and configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function is trainable, since this information needs to be passed to # version D self.training = trainable self.init_approximation = approx_func # set rational activation function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \ .get(version) if self.rational_func is None: raise ValueError( "rational activation function version %s not implemented" % version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, 
self.numerator_length, self.denominator_length) def numpy(self): """ Returns a numpy version of this activation function. """ from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist() return rational_n
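# ---------------------------------------------------------------------------
# Illustrative usage sketch (an addition, not part of the original module).
# It relies only on the constructor and hybrid_forward defined above; the
# input shape is an arbitrary example, and `mx` is already imported at the
# top of the module.
if __name__ == "__main__":
    act = Rational(approx_func="leaky_relu", degrees=(5, 4), version="A")
    act.initialize()  # allocates the numerator/denominator coefficient arrays
    x = mx.nd.random.uniform(-3, 3, shape=(8, 16))
    y = act(x)        # applies the rational activation element-wise
    print(y.shape)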
[((63, 37, 64, 42), 'rational.utils.get_weights.get_parameters', 'get_parameters', ({(64, 12, 64, 19): 'version', (64, 21, 64, 28): 'degrees', (64, 30, 64, 41): 'approx_func'}, {}), '(version, degrees, approx_func)', False, 'from rational.utils.get_weights import get_parameters\n'), ((67, 22, 67, 46), 'mxnet.nd.array', 'mx.nd.array', ({(67, 34, 67, 45): 'w_numerator'}, {}), '(w_numerator)', True, 'import mxnet as mx\n'), ((68, 24, 68, 50), 'mxnet.nd.array', 'mx.nd.array', ({(68, 36, 68, 49): 'w_denominator'}, {}), '(w_denominator)', True, 'import mxnet as mx\n'), ((119, 21, 120, 49), 'rational.numpy.Rational', 'Rational_numpy', ({(119, 36, 119, 59): 'self.init_approximation', (119, 61, 119, 73): 'self.degrees', (120, 36, 120, 48): 'self.version'}, {}), '(self.init_approximation, self.degrees, self.version)', True, 'from rational.numpy import Rational as Rational_numpy\n'), ((85, 50, 86, 61), 'mxnet.initializer.Constant', 'initializer.Constant', ({(86, 49, 86, 60): 'w_numerator'}, {}), '(w_numerator)', False, 'from mxnet import initializer\n'), ((91, 52, 92, 65), 'mxnet.initializer.Constant', 'initializer.Constant', ({(92, 51, 92, 64): 'w_denominator'}, {}), '(w_denominator)', False, 'from mxnet import initializer\n')]
Neklaustares-tPtwP/torchflare
torchflare/criterion/utils.py
7af6b01ef7c26f0277a041619081f6df4eb1e42c
"""Utils for criterion.""" import torch import torch.nn.functional as F def normalize(x, axis=-1): """Performs L2-Norm.""" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): """Computes Euclidean distance.""" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): """Computes Cosine Distance.""" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x, y.t()) return dist
[((28, 8, 28, 29), 'torch.nn.functional.normalize', 'F.normalize', (), '', True, 'import torch.nn.functional as F\n'), ((29, 8, 29, 29), 'torch.nn.functional.normalize', 'F.normalize', (), '', True, 'import torch.nn.functional as F\n'), ((9, 12, 9, 48), 'torch.norm', 'torch.norm', (), '', False, 'import torch\n'), ((17, 9, 17, 24), 'torch.pow', 'torch.pow', ({(17, 19, 17, 20): 'x', (17, 22, 17, 23): '2'}, {}), '(x, 2)', False, 'import torch\n'), ((18, 9, 18, 24), 'torch.pow', 'torch.pow', ({(18, 19, 18, 20): 'x', (18, 22, 18, 23): '2'}, {}), '(x, 2)', False, 'import torch\n')]
eloo/sensor.sbahn_munich
tests/__init__.py
05e05a845178ab529dc4c80e924035fe1d072b55
"""Tests for the sbahn_munich integration""" line_dict = { "name": "S3", "color": "#333333", "text_color": "#444444", }
[]
geudrik/hautomation
app/views/web/homestack.py
0baae29e85cd68658a0f8578de2e36e42945053f
#! /usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask import Blueprint from flask import current_app from flask import render_template from flask_login import login_required homestack = Blueprint("homestack", __name__, url_prefix="/homestack") @homestack.route("/", methods=["GET"]) @login_required def home(): return render_template("homestack/home.html")
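# ---------------------------------------------------------------------------
# Illustrative registration sketch (an addition, not part of the original
# module). The minimal app below is a placeholder; the real application also
# needs a configured flask_login.LoginManager for @login_required to work.
#
#     from flask import Flask
#     from app.views.web.homestack import homestack
#
#     app = Flask(__name__)
#     app.register_blueprint(homestack)   # serves GET /homestack/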
[((10, 12, 10, 69), 'flask.Blueprint', 'Blueprint', (), '', False, 'from flask import Blueprint\n'), ((16, 11, 16, 49), 'flask.render_template', 'render_template', ({(16, 27, 16, 48): '"""homestack/home.html"""'}, {}), "('homestack/home.html')", False, 'from flask import render_template\n')]
gamearming/readthedocs
readthedocs/donate/forms.py
53d0094f657f549326a86b8bd0ccf924c2126941
"""Forms for RTD donations""" import logging from django import forms from django.conf import settings from django.utils.translation import ugettext_lazy as _ from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin from readthedocs.payments.utils import stripe from .models import Supporter log = logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): """Donation support sign up form This extends the basic payment form, giving fields for credit card number, expiry, and CVV. The proper Knockout data bindings are established on :py:class:`StripeModelForm` """ class Meta: model = Supporter fields = ( 'last_4_digits', 'name', 'email', 'dollars', 'logo_url', 'site_url', 'public', ) labels = { 'public': _('Make this donation public'), } help_texts = { 'public': _('Your name and image will be displayed on the donation page'), 'email': _('Your email is used for Gravatar and so we can send you a receipt'), 'logo_url': _("URL of your company's logo, images should be 300x300 pixels or less"), 'dollars': _('Companies donating over $400 can specify a logo URL and site link'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'logo_url': forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable: urls_enabled' }), 'site_url': forms.TextInput(attrs={ 'data-bind': 'value: site_url, enable: urls_enabled' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits' }), } last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email = forms.CharField(required=True) def __init__(self, *args, **kwargs): self.user = kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self): """Call stripe for payment (not ideal here) and clean up logo < $200""" dollars = self.cleaned_data['dollars'] if dollars < 200: self.cleaned_data['logo_url'] = None self.cleaned_data['site_url'] = None stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sustained Engineering', receipt_email=self.cleaned_data['email'] ) def save(self, commit=True): supporter = super(SupporterForm, self).save(commit) if commit and self.user is not None and self.user.is_authenticated(): supporter.user = self.user supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): """Payment form for ethical ads This extends the basic payment form, giving fields for credit card number, expiry, and CVV. The proper Knockout data bindings are established on :py:class:`StripeModelForm` """ class Meta: model = Supporter fields = ( 'last_4_digits', 'name', 'email', 'dollars', ) help_texts = { 'email': _('Your email is used so we can send you a receipt'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits' }), } last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email = forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sponsorship Payment', receipt_email=self.cleaned_data['email'] )
[((14, 6, 14, 33), 'logging.getLogger', 'logging.getLogger', ({(14, 24, 14, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((62, 11, 62, 41), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((63, 12, 63, 42), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((121, 11, 121, 41), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((122, 12, 122, 42), 'django.forms.CharField', 'forms.CharField', (), '', False, 'from django import forms\n'), ((38, 22, 38, 52), 'django.utils.translation.ugettext_lazy', '_', ({(38, 24, 38, 51): '"""Make this donation public"""'}, {}), "('Make this donation public')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((41, 22, 41, 85), 'django.utils.translation.ugettext_lazy', '_', ({(41, 24, 41, 84): '"""Your name and image will be displayed on the donation page"""'}, {}), "('Your name and image will be displayed on the donation page')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((42, 21, 42, 90), 'django.utils.translation.ugettext_lazy', '_', ({(42, 23, 42, 89): '"""Your email is used for Gravatar and so we can send you a receipt"""'}, {}), "('Your email is used for Gravatar and so we can send you a receipt')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((43, 24, 43, 96), 'django.utils.translation.ugettext_lazy', '_', ({(43, 26, 43, 95): '"""URL of your company\'s logo, images should be 300x300 pixels or less"""'}, {}), '("URL of your company\'s logo, images should be 300x300 pixels or less")', True, 'from django.utils.translation import ugettext_lazy as _\n'), ((44, 23, 44, 93), 'django.utils.translation.ugettext_lazy', '_', ({(44, 25, 44, 92): '"""Companies donating over $400 can specify a logo URL and site link"""'}, {}), "('Companies donating over $400 can specify a logo URL and site link')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((47, 23, 49, 14), 'django.forms.HiddenInput', 'forms.HiddenInput', (), '', False, 'from django import forms\n'), ((50, 24, 52, 14), 'django.forms.TextInput', 'forms.TextInput', (), '', False, 'from django import forms\n'), ((53, 24, 55, 14), 'django.forms.TextInput', 'forms.TextInput', (), '', False, 'from django import forms\n'), ((56, 29, 58, 14), 'django.forms.TextInput', 'forms.TextInput', (), '', False, 'from django import forms\n'), ((61, 43, 61, 62), 'django.forms.HiddenInput', 'forms.HiddenInput', ({}, {}), '()', False, 'from django import forms\n'), ((109, 21, 109, 73), 'django.utils.translation.ugettext_lazy', '_', ({(109, 23, 109, 72): '"""Your email is used so we can send you a receipt"""'}, {}), "('Your email is used so we can send you a receipt')", True, 'from django.utils.translation import ugettext_lazy as _\n'), ((112, 23, 114, 14), 'django.forms.HiddenInput', 'forms.HiddenInput', (), '', False, 'from django import forms\n'), ((115, 29, 117, 14), 'django.forms.TextInput', 'forms.TextInput', (), '', False, 'from django import forms\n'), ((120, 43, 120, 62), 'django.forms.HiddenInput', 'forms.HiddenInput', ({}, {}), '()', False, 'from django import forms\n')]
movermeyer/pandas_datareaders_unofficial
pandas_datareaders_unofficial/datareaders/google_finance_options.py
458dcf473d070cd7686d53d4a9b479cbe0ab9218
#!/usr/bin/env python # -*- coding: utf-8 -*- from .base import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO import logging import traceback import datetime import json import token, tokenize def ymd_to_date(y, m, d): """ Returns date >>> expiration = {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) """ return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): """ Returns dict like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} """ d = { 'y': date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): """ Handle lazy JSON - to fix expecting property name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name """ tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _, _, _ in tokengen: # fix unquoted strings if (tokid == token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'"%s"' % tokval # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith ("'"): tokval = u'"%s"' % tokval[1:-1].replace ('"', '\\"') # remove invalid commas elif (tokid == token.OP) and ((tokval == '}') or (tokval == ']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop() # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith ("'"): tokval = u'"%s"' % tokval[1:-1].replace ('"', '\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): """ DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api """ def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url = "https://www.google.com/finance/option_chain" params = { 'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url, params) d = {} lst = [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { "a": "Ask", "b": "Bid", "p": "Last", "strike": "Strike", "expiry": "Expiry", "vol": "Volume", "name": "Name" } df = df.rename(columns=d_cols) """ d_cols = { "a": "ask", "b": "bid", "c": "change", "cid": "identity code", "cp": "cp" "cs": change direction. "chg" = up, "chr" = down, "chg"? 
"e": # I think this tells us something about what country where the stock is traded. "OPRA" means USA. "expiry": expiration date for this option "name": I don't know. I have never seen a value for this "oi": open interest. How many of these are currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp "p": price, last "s": option code. Basically, Stock Symbol + 7 if mini option + date + "C" or "P" + price "strike": "strike price for this option" "vol": "the volume of options traded." } """ for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d = {} #d["options"] = df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json = response.text data = json_decode(content_json) return(data) if __name__ == "__main__": import doctest doctest.testmod()
[((30, 11, 30, 48), 'datetime.date', 'datetime.date', (), '', False, 'import datetime\n'), ((79, 11, 79, 38), 'tokenize.untokenize', 'tokenize.untokenize', ({(79, 31, 79, 37): 'result'}, {}), '(result)', False, 'import token, tokenize\n'), ((208, 4, 208, 21), 'doctest.testmod', 'doctest.testmod', ({}, {}), '()', False, 'import doctest\n'), ((83, 14, 83, 37), 'json.loads', 'json.loads', ({(83, 25, 83, 36): 'json_string'}, {}), '(json_string)', False, 'import json\n'), ((138, 13, 138, 54), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((179, 23, 179, 51), 'pandas.to_datetime', 'pd.to_datetime', ({(179, 38, 179, 50): "df['Expiry']"}, {}), "(df['Expiry'])", True, 'import pandas as pd\n'), ((52, 40, 52, 57), 'six.moves.cStringIO', 'StringIO', ({(52, 49, 52, 56): 'in_text'}, {}), '(in_text)', True, 'from six.moves import cStringIO as StringIO\n'), ((86, 14, 86, 37), 'json.loads', 'json.loads', ({(86, 25, 86, 36): 'json_string'}, {}), '(json_string)', False, 'import json\n'), ((117, 21, 117, 44), 'pandas.DataFrame', 'pd.DataFrame', ({(117, 34, 117, 43): 'data[typ]'}, {}), '(data[typ])', True, 'import pandas as pd\n'), ((132, 25, 132, 48), 'pandas.DataFrame', 'pd.DataFrame', ({(132, 38, 132, 47): 'data[typ]'}, {}), '(data[typ])', True, 'import pandas as pd\n')]
Vail-qin/Keras-TextClassification
keras_textclassification/data_preprocess/generator_preprocess.py
8acda5ae37db2647c8ecaa70027ffc6003d2abca
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time    : 2019/11/2 21:08
# @author  : Mo
# @function:


from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json
from keras_textclassification.conf.path_config import path_model_dir
path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json'
path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json'

import numpy as np
import os


class PreprocessGenerator:
    """
        Data preprocessing; the input is a CSV file with rows of [label, ques].
    """
    def __init__(self):
        self.l2i_i2l = None
        if os.path.exists(path_fast_text_model_l2i_i2l):
            self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l)

    def prereocess_idx(self, pred):
        if os.path.exists(path_fast_text_model_l2i_i2l):
            pred_i2l = {}
            i2l = self.l2i_i2l['i2l']
            for i in range(len(pred)):
                pred_i2l[i2l[str(i)]] = pred[i]
            pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)]
            return pred_i2l_rank
        else:
            raise RuntimeError("path_fast_text_model_label2index is None")

    def prereocess_pred_xid(self, pred):
        if os.path.exists(path_fast_text_model_l2i_i2l):
            pred_l2i = {}
            l2i = self.l2i_i2l['l2i']
            for i in range(len(pred)):
                pred_l2i[pred[i]] = l2i[pred[i]]
            pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)]
            return pred_l2i_rank
        else:
            raise RuntimeError("path_fast_text_model_label2index is None")

    def preprocess_get_label_set(self, path):
        # First collect the label set, i.e. the concrete classes present in the file
        label_set = set()
        len_all = 0
        file_csv = open(path, "r", encoding="utf-8")
        for line in file_csv:
            len_all += 1
            if len_all > 1:  # the first line is the header 'label,ques'; skip it
                line_sp = line.split(",")
                label_org = str(line_sp[0]).strip().upper()
                label_real = "NAN" if label_org == "" else label_org
                label_set.add(label_real)
        file_csv.close()
        return label_set, len_all

    def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1):
        label_set, len_all = self.preprocess_get_label_set(path)
        # Build the label<->index dicts; if label2index already exists, reuse it (e.g. for the dev/validation set)
        if not os.path.exists(path_fast_text_model_l2i_i2l):
            count = 0
            label2index = {}
            index2label = {}
            for label_one in label_set:
                label2index[label_one] = count
                index2label[count] = label_one
                count = count + 1

            l2i_i2l = {}
            l2i_i2l['l2i'] = label2index
            l2i_i2l['i2l'] = index2label
            save_json(l2i_i2l, path_fast_text_model_l2i_i2l)
        else:
            l2i_i2l = load_json(path_fast_text_model_l2i_i2l)

        # Proportion of the data to read
        len_ql = int(rate * len_all)
        if len_ql <= 500:  # not applied for small samples, so there is enough corpus to train on
            len_ql = len_all

        def process_line(line):
            # Process a single row: build the question token indices and the one-hot label
            line_sp = line.split(",")
            ques = str(line_sp[1]).strip().upper()
            label = str(line_sp[0]).strip().upper()
            label = "NAN" if label == "" else label
            que_embed = embed.sentence2idx(ques)
            label_zeros = [0] * len(l2i_i2l['l2i'])
            label_zeros[l2i_i2l['l2i'][label]] = 1
            return que_embed, label_zeros

        while True:
            file_csv = open(path, "r", encoding="utf-8")
            cout_all_line = 0
            cnt = 0
            x, y = [], []
            # exit the loop
            if len_ql < cout_all_line:
                break
            for line in file_csv:
                cout_all_line += 1
                if cout_all_line > 1:  # the first line is the header 'label,ques'; skip it
                    x_line, y_line = process_line(line)
                    x.append(x_line)
                    y.append(y_line)
                    cnt += 1
                    if cnt == batch_size:
                        if embedding_type in ['bert', 'albert']:
                            x_, y_ = np.array(x), np.array(y)
                            x_1 = np.array([x[0] for x in x_])
                            x_2 = np.array([x[1] for x in x_])
                            x_all = [x_1, x_2]
                        elif embedding_type == 'xlnet':
                            x_, y_ = x, np.array(y)
                            x_1 = np.array([x[0][0] for x in x_])
                            x_2 = np.array([x[1][0] for x in x_])
                            x_3 = np.array([x[2][0] for x in x_])
                            x_all = [x_1, x_2, x_3]
                        else:
                            x_all, y_ = np.array(x), np.array(y)

                        cnt = 0
                        yield (x_all, y_)
                        x, y = [], []
            file_csv.close()
print("preprocess_label_ques_to_idx ok")
[((23, 11, 23, 55), 'os.path.exists', 'os.path.exists', ({(23, 26, 23, 54): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'import os\n'), ((27, 11, 27, 55), 'os.path.exists', 'os.path.exists', ({(27, 26, 27, 54): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'import os\n'), ((38, 11, 38, 55), 'os.path.exists', 'os.path.exists', ({(38, 26, 38, 54): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'import os\n'), ((24, 27, 24, 66), 'keras_textclassification.data_preprocess.text_preprocess.load_json', 'load_json', ({(24, 37, 24, 65): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json\n'), ((66, 15, 66, 59), 'os.path.exists', 'os.path.exists', ({(66, 30, 66, 58): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'import os\n'), ((78, 12, 78, 60), 'keras_textclassification.data_preprocess.text_preprocess.save_json', 'save_json', ({(78, 22, 78, 29): 'l2i_i2l', (78, 31, 78, 59): 'path_fast_text_model_l2i_i2l'}, {}), '(l2i_i2l, path_fast_text_model_l2i_i2l)', False, 'from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json\n'), ((80, 22, 80, 61), 'keras_textclassification.data_preprocess.text_preprocess.load_json', 'load_json', ({(80, 32, 80, 60): 'path_fast_text_model_l2i_i2l'}, {}), '(path_fast_text_model_l2i_i2l)', False, 'from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json\n'), ((116, 34, 116, 62), 'numpy.array', 'np.array', ({(116, 43, 116, 61): '[x[0] for x in x_]'}, {}), '([x[0] for x in x_])', True, 'import numpy as np\n'), ((117, 34, 117, 62), 'numpy.array', 'np.array', ({(117, 43, 117, 61): '[x[1] for x in x_]'}, {}), '([x[1] for x in x_])', True, 'import numpy as np\n'), ((115, 37, 115, 48), 'numpy.array', 'np.array', ({(115, 46, 115, 47): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((115, 50, 115, 61), 'numpy.array', 'np.array', ({(115, 59, 115, 60): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((121, 34, 121, 65), 'numpy.array', 'np.array', ({(121, 43, 121, 64): '[x[0][0] for x in x_]'}, {}), '([x[0][0] for x in x_])', True, 'import numpy as np\n'), ((122, 34, 122, 65), 'numpy.array', 'np.array', ({(122, 43, 122, 64): '[x[1][0] for x in x_]'}, {}), '([x[1][0] for x in x_])', True, 'import numpy as np\n'), ((123, 34, 123, 65), 'numpy.array', 'np.array', ({(123, 43, 123, 64): '[x[2][0] for x in x_]'}, {}), '([x[2][0] for x in x_])', True, 'import numpy as np\n'), ((120, 40, 120, 51), 'numpy.array', 'np.array', ({(120, 49, 120, 50): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((126, 40, 126, 51), 'numpy.array', 'np.array', ({(126, 49, 126, 50): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((126, 53, 126, 64), 'numpy.array', 'np.array', ({(126, 62, 126, 63): 'y'}, {}), '(y)', True, 'import numpy as np\n')]
metux/chromium-deb
content/test/gpu/gpu_tests/pixel_expectations.py
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from gpu_tests.gpu_test_expectations import GpuTestExpectations

# See the GpuTestExpectations class for documentation.

class PixelExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Sample Usage:
    # self.Fail('Pixel_Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)

    # Seems to be flaky on the new AMD R7 240 drivers.
    self.Flaky('Pixel_GpuRasterization_BlueBox',
        ['win', ('amd', 0x6613)], bug=653538)

    # Software compositing is not supported on Android; so we skip these tests
    # that disables gpu compositing on Android platforms.
    self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android'])
    self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android'])
    self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android'])
    self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker',
        ['android'])
    self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android'])

    self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer',
        ['android'], bug=521588)

    # TODO(ccameron) fix these on Mac Retina
    self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690)

    # TODO(vmiura) check / generate reference images for Android devices
    self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256)

    self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker',
        ['mac', ('nvidia', 0xfe9)], bug=706016)
    self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277)

    # TODO(kbr): flakily timing out on this configuration.
    self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369)

    self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564)

    # Flaky for unknown reasons only on macOS. Not planning to investigate
    # further.
    self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'],
        bug=660461)

    self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker',
        ['win10', ('intel', 0x1912)], bug=690663)

    # TODO(zakerinasab): check / generate reference images.
    self.Fail('Pixel_Canvas2DUntagged', bug=713632)

    self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize',
        ['mac', 'linux', 'win', 'android'], bug=735228)
    self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize',
        ['mac', 'linux', 'win', 'android'], bug=735171)

    # TODO(junov): update reference images
    self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727)
    self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727)

    # TODO(dshwang): remove these after new reference images are generated.
    self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325)
    self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325)
    self.Fail('Pixel_Video_MP4', bug=615325)
    self.Fail('Pixel_Video_VP9', bug=615325)
[]
18F/data-federation-ingest
examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py
a896ef2da1faf3966f018366b26a338bb66cc717
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-08 22:54
from __future__ import unicode_literals

from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='BudgetItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year', models.IntegerField()),
                ('agency', models.TextField()),
                ('data_source', models.TextField()),
                ('category', models.TextField()),
                ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)),
                ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)),
                ('row_number', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='Upload',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
                ('file', models.FileField(upload_to='')),
                ('raw', models.BinaryField(null=True)),
                ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)),
                ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)),
                ('status_changed_at', models.DateTimeField(null=True)),
                ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')),
                ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='budgetitem',
            name='upload',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'),
        ),
    ]
[((16, 8, 16, 65), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', ({(16, 40, 16, 64): 'settings.AUTH_USER_MODEL'}, {}), '(settings.AUTH_USER_MODEL)', False, 'from django.db import migrations, models\n'), ((56, 18, 56, 112), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((23, 23, 23, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((24, 25, 24, 46), 'django.db.models.IntegerField', 'models.IntegerField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((25, 27, 25, 45), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((26, 32, 26, 50), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((27, 29, 27, 47), 'django.db.models.TextField', 'models.TextField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((28, 37, 28, 89), 'django.db.models.DecimalField', 'models.DecimalField', (), '', False, 'from django.db import migrations, models\n'), ((29, 34, 29, 86), 'django.db.models.DecimalField', 'models.DecimalField', (), '', False, 'from django.db import migrations, models\n'), ((30, 31, 30, 52), 'django.db.models.IntegerField', 'models.IntegerField', ({}, {}), '()', False, 'from django.db import migrations, models\n'), ((36, 23, 36, 112), 'django.db.models.AutoField', 'models.AutoField', (), '', False, 'from django.db import migrations, models\n'), ((37, 31, 37, 70), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((38, 31, 38, 66), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((40, 25, 40, 55), 'django.db.models.FileField', 'models.FileField', (), '', False, 'from django.db import migrations, models\n'), ((41, 24, 41, 53), 'django.db.models.BinaryField', 'models.BinaryField', (), '', False, 'from django.db import migrations, models\n'), ((43, 27, 43, 207), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import migrations, models\n'), ((44, 38, 44, 69), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import migrations, models\n'), ((45, 29, 45, 162), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((46, 38, 46, 158), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n'), ((47, 30, 47, 121), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import migrations, models\n')]
Kaslanarian/PythonSVM
setup.py
715eeef2a245736167addf45a6aee8b40b54d0c7
import setuptools  # enables develop

setuptools.setup(
    name='pysvm',
    version='0.1',
    description='PySVM : A NumPy implementation of SVM based on SMO algorithm',
    author_email="[email protected]",
    packages=['pysvm'],
    license='MIT License',
    long_description=open('README.md', encoding='utf-8').read(),
    install_requires=[  # install dependencies automatically
        'numpy',
        'sklearn'
    ],
    url='https://github.com/Kaslanarian/PySVM',
)
[]
hiperus0988/pyao
Object_detection_image.py
72c56975a3d45aa033bdf7650b5369d59240395f
######## Image Object Detection Using Tensorflow-trained Classifier ######### # # Author: Evan Juras # Date: 1/15/18 # Description: # This program uses a TensorFlow-trained classifier to perform object detection. # It loads the classifier uses it to perform object detection on an image. # It draws boxes and scores around the objects of interest in the image. ## Some of the code is copied from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied from Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it to make it more understandable to me. # Import packages import os import cv2 import numpy as np import tensorflow as tf import sys # This is needed since the notebook is stored in the object_detection folder. sys.path.append("..") # Import utilites from utils import label_map_util from utils import visualization_utils as vis_util # Name of the directory containing the object detection module we're using MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab path to current working directory CWD_PATH = os.getcwd() # Path to frozen detection graph .pb file, which contains the model that is used # for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the object detector can identify NUM_CLASSES = 6 # Load the label map. # Label maps map indices to category names, so that when our convolution # network predicts `5`, we know that this corresponds to `king`. # Here we use internal utility functions, but anything that returns a # dictionary mapping integers to appropriate string labels would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load the Tensorflow model into memory. detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) # Define input and output tensors (i.e. data) for the object detection classifier # Input tensor is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the detection boxes, scores, and classes # Each box represents a part of the image where a particular object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level of confidence for each of the objects. # The score is shown on the result image, together with the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV and # expand image dimensions to have shape: [1, None, None, 3] # i.e. 
a single-column array, where each item in the column has the pixel RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) # Perform the actual detection by running the model with the image as input (boxes, scores, classes, num) = sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw the results of the detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the results have been drawn on image. Now display the image. cv2.imshow('Object detector', image) # Press any key to close the image cv2.waitKey(0) # Clean up cv2.destroyAllWindows()
[((26, 0, 26, 21), 'sys.path.append', 'sys.path.append', ({(26, 16, 26, 20): '""".."""'}, {}), "('..')", False, 'import sys\n'), ((37, 11, 37, 22), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((41, 15, 41, 76), 'os.path.join', 'os.path.join', ({(41, 28, 41, 36): 'CWD_PATH', (41, 37, 41, 47): 'MODEL_NAME', (41, 48, 41, 75): '"""frozen_inference_graph.pb"""'}, {}), "(CWD_PATH, MODEL_NAME, 'frozen_inference_graph.pb')", False, 'import os\n'), ((44, 17, 44, 67), 'os.path.join', 'os.path.join', ({(44, 30, 44, 38): 'CWD_PATH', (44, 39, 44, 49): '"""training"""', (44, 50, 44, 66): '"""labelmap.pbtxt"""'}, {}), "(CWD_PATH, 'training', 'labelmap.pbtxt')", False, 'import os\n'), ((47, 16, 47, 49), 'os.path.join', 'os.path.join', ({(47, 29, 47, 37): 'CWD_PATH', (47, 38, 47, 48): 'IMAGE_NAME'}, {}), '(CWD_PATH, IMAGE_NAME)', False, 'import os\n'), ((57, 12, 57, 56), 'utils.label_map_util.load_labelmap', 'label_map_util.load_labelmap', ({(57, 41, 57, 55): 'PATH_TO_LABELS'}, {}), '(PATH_TO_LABELS)', False, 'from utils import label_map_util\n'), ((58, 13, 58, 122), 'utils.label_map_util.convert_label_map_to_categories', 'label_map_util.convert_label_map_to_categories', (), '', False, 'from utils import label_map_util\n'), ((59, 17, 59, 65), 'utils.label_map_util.create_category_index', 'label_map_util.create_category_index', ({(59, 54, 59, 64): 'categories'}, {}), '(categories)', False, 'from utils import label_map_util\n'), ((62, 18, 62, 28), 'tensorflow.Graph', 'tf.Graph', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((92, 8, 92, 33), 'cv2.imread', 'cv2.imread', ({(92, 19, 92, 32): 'PATH_TO_IMAGE'}, {}), '(PATH_TO_IMAGE)', False, 'import cv2\n'), ((93, 17, 93, 46), 'numpy.expand_dims', 'np.expand_dims', (), '', True, 'import numpy as np\n'), ((113, 0, 113, 36), 'cv2.imshow', 'cv2.imshow', ({(113, 11, 113, 28): '"""Object detector"""', (113, 30, 113, 35): 'image'}, {}), "('Object detector', image)", False, 'import cv2\n'), ((116, 0, 116, 14), 'cv2.waitKey', 'cv2.waitKey', ({(116, 12, 116, 13): '(0)'}, {}), '(0)', False, 'import cv2\n'), ((119, 0, 119, 23), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ({}, {}), '()', False, 'import cv2\n'), ((64, 19, 64, 32), 'tensorflow.GraphDef', 'tf.GraphDef', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((70, 11, 70, 44), 'tensorflow.Session', 'tf.Session', (), '', True, 'import tensorflow as tf\n'), ((104, 4, 104, 21), 'numpy.squeeze', 'np.squeeze', ({(104, 15, 104, 20): 'boxes'}, {}), '(boxes)', True, 'import numpy as np\n'), ((106, 4, 106, 22), 'numpy.squeeze', 'np.squeeze', ({(106, 15, 106, 21): 'scores'}, {}), '(scores)', True, 'import numpy as np\n'), ((65, 9, 65, 43), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', ({(65, 24, 65, 36): 'PATH_TO_CKPT', (65, 38, 65, 42): '"""rb"""'}, {}), "(PATH_TO_CKPT, 'rb')", True, 'import tensorflow as tf\n'), ((68, 8, 68, 50), 'tensorflow.import_graph_def', 'tf.import_graph_def', (), '', True, 'import tensorflow as tf\n'), ((105, 4, 105, 23), 'numpy.squeeze', 'np.squeeze', ({(105, 15, 105, 22): 'classes'}, {}), '(classes)', True, 'import numpy as np\n')]
chris48s/UK-Polling-Stations
polling_stations/apps/data_collection/management/commands/import_torbay.py
4742b527dae94f0276d35c80460837be743b7d17
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter

class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E06000027'
    addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
[]
Bhavya0020/Readopolis
Backend/product/views.py
a0053e4fae97dc8291b50c746f3dc3e6b454ad95
from django.db.models import Q
from django.shortcuts import render
from django.http import Http404

# Create your views here.
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import api_view

from .models import Product, Category
from .serializers import ProductSerializer, CategorySerializer


class LatestProductsList(APIView):
    def get(self, request, format=None):
        products = Product.objects.all()[0:4]
        serializer = ProductSerializer(products, many=True)
        return Response(serializer.data)


class ProductDetail(APIView):
    def get_object(self, category_slug, product_slug):
        try:
            return Product.objects.filter(category__slug=category_slug).get(slug=product_slug)
        except Product.DoesNotExist:
            raise Http404

    def get(self, request, category_slug, product_slug, format=None):
        product = self.get_object(category_slug, product_slug)
        serializer = ProductSerializer(product)
        return Response(serializer.data)


class CategoryDetail(APIView):
    def get_object(self, category_slug):
        try:
            return Category.objects.get(slug=category_slug)
        except Category.DoesNotExist:
            raise Http404

    def get(self, request, category_slug, format=None):
        category = self.get_object(category_slug)
        serializer = CategorySerializer(category)
        return Response(serializer.data)


@api_view(['POST'])
def search(request):
    query = request.data.get('query', '')
    if query:
        products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query))
        serializer = ProductSerializer(products, many=True)
        return Response(serializer.data)
    else:
        return Response({"products": []})
[((44, 1, 44, 19), 'rest_framework.decorators.api_view', 'api_view', ({(44, 10, 44, 18): "['POST']"}, {}), "(['POST'])", False, 'from rest_framework.decorators import api_view\n'), ((18, 15, 18, 40), 'rest_framework.response.Response', 'Response', ({(18, 24, 18, 39): 'serializer.data'}, {}), '(serializer.data)', False, 'from rest_framework.response import Response\n'), ((30, 15, 30, 40), 'rest_framework.response.Response', 'Response', ({(30, 24, 30, 39): 'serializer.data'}, {}), '(serializer.data)', False, 'from rest_framework.response import Response\n'), ((42, 15, 42, 40), 'rest_framework.response.Response', 'Response', ({(42, 24, 42, 39): 'serializer.data'}, {}), '(serializer.data)', False, 'from rest_framework.response import Response\n'), ((51, 15, 51, 40), 'rest_framework.response.Response', 'Response', ({(51, 24, 51, 39): 'serializer.data'}, {}), '(serializer.data)', False, 'from rest_framework.response import Response\n'), ((53, 15, 53, 41), 'rest_framework.response.Response', 'Response', ({(53, 24, 53, 40): "{'products': []}"}, {}), "({'products': []})", False, 'from rest_framework.response import Response\n'), ((49, 42, 49, 66), 'django.db.models.Q', 'Q', (), '', False, 'from django.db.models import Q\n'), ((49, 69, 49, 100), 'django.db.models.Q', 'Q', (), '', False, 'from django.db.models import Q\n')]
hubogeri/python_training
model/contact.py
7a918040e4c8bae5a031134911bc8b465f322699
from sys import maxsize


class Contact:

    def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None,
                 home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None,
                 homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None,
                 secaddr=None, secphone=None, note=None, id=None):
        self.fname = fname
        self.mname = mname
        self.lname = lname
        self.nick = nick
        self.title = title
        self.comp = comp
        self.addr = addr
        self.home = home
        self.mobile = mobile
        self.work = work
        self.fax = fax
        self.email1 = email1
        self.email2 = email2
        self.email3 = email3
        self.homepage = homepage
        self.bday = bday
        self.bmonth = bmonth
        self.byear = byear
        self.aday = aday
        self.amonth = amonth
        self.ayear = ayear
        self.secaddr = secaddr
        self.secphone = secphone
        self.note = note
        self.id = id

    def __repr__(self):
        return "%s:%s:%s" % (self.id, self.fname, self.lname)

    def __eq__(self, other):
        return (self.id is None or other.id is None or self.id == other.id) and \
               self.fname == other.fname and self.lname == other.lname

    def id_or_max(self):
        if self.id:
            return int(self.id)
        else:
            return maxsize
[]
ericmehl/cortex
test/IECore/BasicPreset.py
054839cc709ce153d1bcaaefe7f340ebe641ec82
########################################################################## # # Copyright (c) 2010-2012, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of Image Engine Design nor the names of any # other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## from __future__ import with_statement import os import sys import shutil import unittest import IECore class TestBasicPreset( unittest.TestCase ) : def testCopy( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.FloatParameter( "b", "", 1.0 ), ] ) testObj2 = IECore.Parameterised( "testParameterised2" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( "a", "", False ), IECore.FloatParameter( "c", "", 0.0 ), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()["a"].setTypedValue( False ) testObj.parameters()["b"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()["a"].getTypedValue(), True ) self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 ) def testLoad( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.FloatParameter( "b", "", 1.0 ), ] ) testObj2 = IECore.Parameterised( "testParameterised1" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( "a", "", False ), IECore.FloatParameter( "c", "", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( 
__file__ ), "data", "basicPreset" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join( savePath, "basicPresetLoadTest", "basicPresetLoadTest-1.cob" ) ) self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()["a"].setTypedValue( False ) testObj.parameters()["b"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()["a"].getTypedValue(), True ) self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 ) def testSave( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.FloatParameter( "b", "", 1.0 ), ] ) testObj2 = IECore.Parameterised( "testParameterised1" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( "a", "", False ), IECore.FloatParameter( "c", "", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the classLoader and check its there, we test the 'loadability' later... preset.save( savePath, "basicPresetTest" ) self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.cob" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.py" ) ) ) # save without the classLoader and check its there preset.save( savePath, "basicPresetTest", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest.cob" ) ) ) # reload p = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest.cob" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()["a"].setTypedValue( False ) testObj.parameters()["b"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()["a"].getTypedValue(), True ) self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) ) preset2.save( savePath, "basicPresetTest2", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest2.cob" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 ) def testClassLoader( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.FloatParameter( "b", "", 1.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, "basicPresetTestClassLoader" ) # make sure that no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( "basicPresetTestClassLoader" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset 
) ) p.metadata() def testClasses( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.ClassParameter( "b", "", "IECORE_OP_PATHS", os.path.join( "maths", "multiply" ), 2 ), ] ) testObj2 = IECore.Parameterised( "testParameterised2" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( "c", "", "IECORE_OP_PATHS" ), ] ) classes1 = testObj.parameters()["b"].getClass( True ) classes2 = testObj2.parameters()["c"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()["b"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()["b"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()["c"] ) ) p( testObj2, testObj2.parameters()["c"] ) classes1 = testObj.parameters()["b"].getClass( True ) classes2 = testObj2.parameters()["c"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self ) : testObj = IECore.Parameterised( "testParameterised1" ) testObj.parameters().addParameters( [ IECore.BoolParameter( "a", "", True ), IECore.ClassVectorParameter( "b", "", "IECORE_OP_PATHS" ), ] ) testObj.parameters()["b"].setClasses( [ ( "mult", os.path.join( "maths", "multiply" ), 2 ), ( "coIO", "compoundObjectInOut", 1 ), ] ) testObj2 = IECore.Parameterised( "testParameterised2" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( "c", "", "IECORE_OP_PATHS" ), ] ) classes1 = [ c[1:] for c in testObj.parameters()["b"].getClasses( True ) ] classes2 = [ c[1:] for c in testObj2.parameters()["c"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()["b"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()["b"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()["c"] ) ) p( testObj2, testObj2.parameters()["c"] ) classes1 = [ c[1:] for c in testObj.parameters()["b"].getClasses( True ) ] classes2 = [ c[1:] for c in testObj2.parameters()["c"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) : p = IECore.Parameterised( "test" ) p.parameters().addParameters( [ IECore.BoolParameter( "a", "", False ), IECore.CompoundVectorParameter( "c", "", members = [ IECore.StringVectorParameter( "s", "", IECore.StringVectorData() ), IECore.BoolVectorParameter( "b", "", IECore.BoolVectorData() ), ] ) ] ) p["c"]["s"].setValue( IECore.StringVectorData( [ "1", "2", "3" ] ) ) p["c"]["b"].setValue( IECore.BoolVectorData( [ True, False, True ] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) ) paths = ( os.path.join( savePath, "basicPresetTest" ), os.path.join( savePath, "basicPresetTest.cob" ), os.path.join( savePath, "basicPresetTest2.cob" ), os.path.join( savePath, "basicPresetTestClassLoader" ), ) for p in paths : if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p ) : os.remove( p ) if __name__ == "__main__": 
unittest.main()
[((336, 1, 336, 16), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((47, 12, 47, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(47, 34, 47, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((55, 13, 55, 57), 'IECore.Parameterised', 'IECore.Parameterised', ({(55, 35, 55, 55): '"""testParameterised2"""'}, {}), "('testParameterised2')", False, 'import IECore\n'), ((88, 12, 88, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(88, 34, 88, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((96, 13, 96, 57), 'IECore.Parameterised', 'IECore.Parameterised', ({(96, 35, 96, 55): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((106, 19, 106, 51), 'IECore.CapturingMessageHandler', 'IECore.CapturingMessageHandler', ({}, {}), '()', False, 'import IECore\n'), ((126, 12, 126, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(126, 34, 126, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((134, 13, 134, 57), 'IECore.Parameterised', 'IECore.Parameterised', ({(134, 35, 134, 55): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((185, 12, 185, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(185, 34, 185, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((198, 19, 198, 51), 'IECore.CapturingMessageHandler', 'IECore.CapturingMessageHandler', ({}, {}), '()', False, 'import IECore\n'), ((212, 12, 212, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(212, 34, 212, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((220, 13, 220, 57), 'IECore.Parameterised', 'IECore.Parameterised', ({(220, 35, 220, 55): '"""testParameterised2"""'}, {}), "('testParameterised2')", False, 'import IECore\n'), ((246, 12, 246, 56), 'IECore.Parameterised', 'IECore.Parameterised', ({(246, 34, 246, 54): '"""testParameterised1"""'}, {}), "('testParameterised1')", False, 'import IECore\n'), ((261, 13, 261, 57), 'IECore.Parameterised', 'IECore.Parameterised', ({(261, 35, 261, 55): '"""testParameterised2"""'}, {}), "('testParameterised2')", False, 'import IECore\n'), ((288, 6, 288, 36), 'IECore.Parameterised', 'IECore.Parameterised', ({(288, 28, 288, 34): '"""test"""'}, {}), "('test')", False, 'import IECore\n'), ((156, 26, 156, 73), 'os.path.join', 'os.path.join', ({(156, 40, 156, 48): 'savePath', (156, 50, 156, 71): '"""basicPresetTest.cob"""'}, {}), "(savePath, 'basicPresetTest.cob')", False, 'import os\n'), ((173, 27, 173, 75), 'os.path.join', 'os.path.join', ({(173, 41, 173, 49): 'savePath', (173, 51, 173, 73): '"""basicPresetTest2.cob"""'}, {}), "(savePath, 'basicPresetTest2.cob')", False, 'import os\n'), ((303, 24, 303, 68), 'IECore.StringVectorData', 'IECore.StringVectorData', ({(303, 49, 303, 66): "['1', '2', '3']"}, {}), "(['1', '2', '3'])", False, 'import IECore\n'), ((304, 24, 304, 70), 'IECore.BoolVectorData', 'IECore.BoolVectorData', ({(304, 47, 304, 68): '[True, False, True]'}, {}), '([True, False, True])', False, 'import IECore\n'), ((323, 3, 323, 46), 'os.path.join', 'os.path.join', ({(323, 17, 323, 25): 'savePath', (323, 27, 323, 44): '"""basicPresetTest"""'}, {}), "(savePath, 'basicPresetTest')", False, 'import os\n'), ((324, 3, 324, 50), 'os.path.join', 'os.path.join', ({(324, 17, 324, 25): 'savePath', (324, 27, 324, 48): '"""basicPresetTest.cob"""'}, {}), "(savePath, 
'basicPresetTest.cob')", False, 'import os\n'), ((325, 3, 325, 51), 'os.path.join', 'os.path.join', ({(325, 17, 325, 25): 'savePath', (325, 27, 325, 49): '"""basicPresetTest2.cob"""'}, {}), "(savePath, 'basicPresetTest2.cob')", False, 'import os\n'), ((326, 3, 326, 57), 'os.path.join', 'os.path.join', ({(326, 17, 326, 25): 'savePath', (326, 27, 326, 55): '"""basicPresetTestClassLoader"""'}, {}), "(savePath, 'basicPresetTestClassLoader')", False, 'import os\n'), ((330, 6, 330, 24), 'os.path.isdir', 'os.path.isdir', ({(330, 21, 330, 22): 'p'}, {}), '(p)', False, 'import os\n'), ((50, 4, 50, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(50, 26, 50, 29): '"""a"""', (50, 31, 50, 33): '""""""', (50, 35, 50, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((51, 4, 51, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(51, 27, 51, 30): '"""b"""', (51, 32, 51, 34): '""""""', (51, 36, 51, 39): '(1.0)'}, {}), "('b', '', 1.0)", False, 'import IECore\n'), ((58, 4, 58, 42), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(58, 26, 58, 29): '"""a"""', (58, 31, 58, 33): '""""""', (58, 35, 58, 40): '(False)'}, {}), "('a', '', False)", False, 'import IECore\n'), ((59, 4, 59, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(59, 27, 59, 30): '"""c"""', (59, 32, 59, 34): '""""""', (59, 36, 59, 39): '(0.0)'}, {}), "('c', '', 0.0)", False, 'import IECore\n'), ((91, 4, 91, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(91, 26, 91, 29): '"""a"""', (91, 31, 91, 33): '""""""', (91, 35, 91, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((92, 4, 92, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(92, 27, 92, 30): '"""b"""', (92, 32, 92, 34): '""""""', (92, 36, 92, 39): '(1.0)'}, {}), "('b', '', 1.0)", False, 'import IECore\n'), ((99, 4, 99, 42), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(99, 26, 99, 29): '"""a"""', (99, 31, 99, 33): '""""""', (99, 35, 99, 40): '(False)'}, {}), "('a', '', False)", False, 'import IECore\n'), ((100, 4, 100, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(100, 27, 100, 30): '"""c"""', (100, 32, 100, 34): '""""""', (100, 36, 100, 39): '(0.0)'}, {}), "('c', '', 0.0)", False, 'import IECore\n'), ((104, 44, 104, 71), 'os.path.dirname', 'os.path.dirname', ({(104, 61, 104, 69): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((109, 27, 109, 103), 'os.path.join', 'os.path.join', ({(109, 41, 109, 49): 'savePath', (109, 51, 109, 72): '"""basicPresetLoadTest"""', (109, 74, 109, 101): '"""basicPresetLoadTest-1.cob"""'}, {}), "(savePath, 'basicPresetLoadTest', 'basicPresetLoadTest-1.cob')", False, 'import os\n'), ((129, 4, 129, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(129, 26, 129, 29): '"""a"""', (129, 31, 129, 33): '""""""', (129, 35, 129, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((130, 4, 130, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(130, 27, 130, 30): '"""b"""', (130, 32, 130, 34): '""""""', (130, 36, 130, 39): '(1.0)'}, {}), "('b', '', 1.0)", False, 'import IECore\n'), ((137, 4, 137, 42), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(137, 26, 137, 29): '"""a"""', (137, 31, 137, 33): '""""""', (137, 35, 137, 40): '(False)'}, {}), "('a', '', False)", False, 'import IECore\n'), ((138, 4, 138, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(138, 27, 138, 30): '"""c"""', (138, 32, 138, 34): '""""""', (138, 36, 138, 39): '(0.0)'}, {}), "('c', '', 0.0)", False, 'import IECore\n'), ((142, 44, 142, 71), 'os.path.dirname', 
'os.path.dirname', ({(142, 61, 142, 69): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((148, 35, 148, 103), 'os.path.join', 'os.path.join', ({(148, 49, 148, 57): 'savePath', (148, 59, 148, 76): '"""basicPresetTest"""', (148, 78, 148, 101): '"""basicPresetTest-1.cob"""'}, {}), "(savePath, 'basicPresetTest', 'basicPresetTest-1.cob')", False, 'import os\n'), ((149, 35, 149, 102), 'os.path.join', 'os.path.join', ({(149, 49, 149, 57): 'savePath', (149, 59, 149, 76): '"""basicPresetTest"""', (149, 78, 149, 100): '"""basicPresetTest-1.py"""'}, {}), "(savePath, 'basicPresetTest', 'basicPresetTest-1.py')", False, 'import os\n'), ((153, 35, 153, 82), 'os.path.join', 'os.path.join', ({(153, 49, 153, 57): 'savePath', (153, 59, 153, 80): '"""basicPresetTest.cob"""'}, {}), "(savePath, 'basicPresetTest.cob')", False, 'import os\n'), ((188, 4, 188, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(188, 26, 188, 29): '"""a"""', (188, 31, 188, 33): '""""""', (188, 35, 188, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((189, 4, 189, 41), 'IECore.FloatParameter', 'IECore.FloatParameter', ({(189, 27, 189, 30): '"""b"""', (189, 32, 189, 34): '""""""', (189, 36, 189, 39): '(1.0)'}, {}), "('b', '', 1.0)", False, 'import IECore\n'), ((193, 44, 193, 71), 'os.path.dirname', 'os.path.dirname', ({(193, 61, 193, 69): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((201, 32, 201, 61), 'IECore.SearchPath', 'IECore.SearchPath', ({(201, 51, 201, 59): 'savePath'}, {}), '(savePath)', False, 'import IECore\n'), ((215, 4, 215, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(215, 26, 215, 29): '"""a"""', (215, 31, 215, 33): '""""""', (215, 35, 215, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((223, 4, 223, 55), 'IECore.ClassParameter', 'IECore.ClassParameter', ({(223, 27, 223, 30): '"""c"""', (223, 32, 223, 34): '""""""', (223, 36, 223, 53): '"""IECORE_OP_PATHS"""'}, {}), "('c', '', 'IECORE_OP_PATHS')", False, 'import IECore\n'), ((249, 4, 249, 41), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(249, 26, 249, 29): '"""a"""', (249, 31, 249, 33): '""""""', (249, 35, 249, 39): '(True)'}, {}), "('a', '', True)", False, 'import IECore\n'), ((250, 4, 250, 61), 'IECore.ClassVectorParameter', 'IECore.ClassVectorParameter', ({(250, 33, 250, 36): '"""b"""', (250, 38, 250, 40): '""""""', (250, 42, 250, 59): '"""IECORE_OP_PATHS"""'}, {}), "('b', '', 'IECORE_OP_PATHS')", False, 'import IECore\n'), ((264, 4, 264, 61), 'IECore.ClassVectorParameter', 'IECore.ClassVectorParameter', ({(264, 33, 264, 36): '"""c"""', (264, 38, 264, 40): '""""""', (264, 42, 264, 59): '"""IECORE_OP_PATHS"""'}, {}), "('c', '', 'IECORE_OP_PATHS')", False, 'import IECore\n'), ((291, 4, 291, 42), 'IECore.BoolParameter', 'IECore.BoolParameter', ({(291, 26, 291, 29): '"""a"""', (291, 31, 291, 33): '""""""', (291, 35, 291, 40): '(False)'}, {}), "('a', '', False)", False, 'import IECore\n'), ((321, 44, 321, 71), 'os.path.dirname', 'os.path.dirname', ({(321, 61, 321, 69): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((331, 4, 331, 22), 'shutil.rmtree', 'shutil.rmtree', ({(331, 19, 331, 20): 'p'}, {}), '(p)', False, 'import shutil\n'), ((332, 8, 332, 27), 'os.path.isfile', 'os.path.isfile', ({(332, 24, 332, 25): 'p'}, {}), '(p)', False, 'import os\n'), ((216, 55, 216, 90), 'os.path.join', 'os.path.join', ({(216, 69, 216, 76): '"""maths"""', (216, 78, 216, 88): '"""multiply"""'}, {}), "('maths', 'multiply')", False, 'import os\n'), ((256, 14, 256, 49), 'os.path.join', 'os.path.join', 
({(256, 28, 256, 35): '"""maths"""', (256, 37, 256, 47): '"""multiply"""'}, {}), "('maths', 'multiply')", False, 'import os\n'), ((333, 4, 333, 18), 'os.remove', 'os.remove', ({(333, 15, 333, 16): 'p'}, {}), '(p)', False, 'import os\n'), ((296, 45, 296, 70), 'IECore.StringVectorData', 'IECore.StringVectorData', ({}, {}), '()', False, 'import IECore\n'), ((297, 43, 297, 66), 'IECore.BoolVectorData', 'IECore.BoolVectorData', ({}, {}), '()', False, 'import IECore\n')]
imanolarrieta/RL
rlpy/Domains/Pacman.py
072a8c328652f45e053baecd640f04adf7f84b49
"""Pacman game domain.""" from rlpy.Tools import __rlpy_location__ from .Domain import Domain from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as np from copy import deepcopy import os import time __copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy" __credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann", "William Dabney", "Jonathan P. How"] __license__ = "BSD 3-Clause" __author__ = "Austin Hays" class Pacman(Domain): """ Pacman domain, which acts as a wrapper for the Pacman implementation from the BerkeleyX/CS188.1x course project 3. **STATE:** The state vector has a series of dimensions: * [2] The x and y coordinates of pacman * [3 * ng] the x and y coordinates as well as the scare time of each ghost ("scare time" is how long the ghost remains scared after consuming a capsule.) * [nf] binary variables indicating if a food is still on the board or not * [nc] binary variables for each capsule indicating if it is still on the board or not *nf* and *nc* are map-dependent, and *ng* can be set as a parameter. Based on above, total dimensionality of state vector is map-dependent, and given by (2 + 3*ng + nf + nc). **ACTIONS:** Move Pacman [up, down, left, right, stay] **REWARD:** See the Berkeley project website below for more info. .. note:: The visualization runs as fast as your CPU will permit; to slow things down so gameplay is actually visible, de-comment time.sleep() in the showDomain() method. **REFERENCE:** This domain is an RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain see the original package in the `Domains/PacmanPackage` folder. """ _max_scared_time = 39 actions = ["Stop", "North", "East", "South", "West"] actions_num = 5 episodeCap = 1000 #: location of layouts shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__, "Domains", "PacmanPackage", "layouts") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): """ layoutFile: filename of the map file noise: with this probability pacman makes a random move instead the one specified by the action """ self.noise = noise # Specifies which Pacman world you want self.layoutFile = layoutFile # Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer = None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array of limits for each dimension in the state vector. 
statespace_limits = [] # adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y locations and scaredTimer (how long they can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype="float") def _set_state(self, s): """ Takes a vector s and sets the internal game state used by the original pacman package. """ # copies most recent state data = self.game_state.data agent_states = data.agentStates # set pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost position num_ghosts = len(agent_states) - 1 for i in range(1, num_ghosts + 1): part_s = s[(3 * i) - 1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food = s[(num_ghosts + 1) * 3:] x = 0 y = 0 i = 0 data.capsules = [] for char in str(self.layout_copy): if char == ".": data.food[x][y] = bool(s_food[i]) i += 1 elif char == "o": coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1 elif char == "\n": y += 1 x = -1 x += 1 def _get_state(self): """ get the internal game state represented as a numpy array """ data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s = np.zeros( 2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get ghost info for i in range(num_ghosts): s[2 + i * 3: 2 + i * 3 + 2] = agent_states[i + 1].configuration.pos s[2 + i * 3 + 2] = agent_states[i + 1].scaredTimer # get food and capsules status i = 2 + num_ghosts * 3 x = 0 y = 0 for char in str(self.layout_copy): if char == ".": s[i] = data.food[x][y] i += 1 elif char == "\n": y += 1 x = -1 elif char == "o": coord = (x, self.layout_copy.height - y) if coord in data.capsules: s[i] = 1. i += 1 x += 1 return s state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not None: errStr = 'ERROR: In Pacman.py, attempted to pass a state (s)'\ 'to showDomain(); Pacman only supports internal states.'\ 'If you do pass a state parameter, ensure it is set to None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance and updates # the display every time pacman or a ghost moves. 
# s.data.food is the correct food matrix s.data.layout.food = s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None # time.sleep(0.1) # Sleep for 0.1 sec def step(self, a): """ Applies actions from outside the Pacman domain to the given state. Internal states accounted for along with scoring and terminal checking. Returns a tuple of form (reward, new state vector, terminal) """ if self.random_state.random_sample() < self.noise: # Random Move a = self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action "a" in current state object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for graphics (original code assumes # graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring in pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def s0(self): """ re-initializes internal states when an episode starts, returns a s vector """ self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat hacky, but should not matter anyway, maybe clean up in # the future return np.array([0]) # makes an array of possible actions pacman can perform at any given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): """ Checks whether the game should terminate at the given state. (Terminate for failure, ie eaten by ghost or out of time, and for success, all food on map eaten.) If game should terminate, returns the proper indication to step function. Accounts for scoring changes in terminal states. """ return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): # used in getLayout function f = open(fullname) grid = [line.strip() for line in f] f.close() return grid class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs): pass def finalize(self, *arg, **kwargs): pass
[((61, 25, 63, 18), 'os.path.join', 'os.path.join', ({(62, 8, 62, 25): '__rlpy_location__', (62, 27, 62, 36): '"""Domains"""', (62, 38, 62, 53): '"""PacmanPackage"""', (63, 8, 63, 17): '"""layouts"""'}, {}), "(__rlpy_location__, 'Domains', 'PacmanPackage', 'layouts')", False, 'import os\n'), ((66, 28, 67, 61), 'os.path.join', 'os.path.join', ({(67, 21, 67, 39): 'default_layout_dir', (67, 41, 67, 60): '"""trickyClassic.lay"""'}, {}), "(default_layout_dir, 'trickyClassic.lay')", False, 'import os\n'), ((87, 27, 87, 48), 'copy.deepcopy', 'deepcopy', ({(87, 36, 87, 47): 'self.layout'}, {}), '(self.layout)', False, 'from copy import deepcopy\n'), ((115, 33, 115, 75), 'numpy.array', 'np.array', (), '', True, 'import numpy as np\n'), ((164, 12, 165, 79), 'numpy.zeros', 'np.zeros', ({(165, 12, 165, 78): '2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules'}, {}), '(2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules)', True, 'import numpy as np\n'), ((265, 27, 265, 48), 'copy.deepcopy', 'deepcopy', ({(265, 36, 265, 47): 'self.layout'}, {}), '(self.layout)', False, 'from copy import deepcopy\n'), ((286, 15, 286, 40), 'numpy.array', 'np.array', ({(286, 24, 286, 39): 'possibleActions'}, {}), '(possibleActions)', True, 'import numpy as np\n'), ((278, 19, 278, 32), 'numpy.array', 'np.array', ({(278, 28, 278, 31): '[0]'}, {}), '([0])', True, 'import numpy as np\n')]
rickdg/vivi
core/src/zeit/cms/settings/interfaces.py
16134ac954bf8425646d4ad47bdd1f372e089355
from zeit.cms.i18n import MessageFactory as _
import zope.interface
import zope.schema


class IGlobalSettings(zope.interface.Interface):
    """Global CMS settings."""

    default_year = zope.schema.Int(
        title=_("Default year"),
        min=1900,
        max=2100)

    default_volume = zope.schema.Int(
        title=_("Default volume"),
        min=1,
        max=54)

    def get_working_directory(template):
        """Return the collection which is the main working directory.

        template:
            Template which will be filled with year and volume. In
            ``template`` the placeholders $year and $volume will be replaced.
            Example: 'online/$year/$volume/foo'

        If the respective collection does not exist, it will be created
        before returning it.

        """
[((10, 14, 10, 31), 'zeit.cms.i18n.MessageFactory', '_', ({(10, 16, 10, 30): '"""Default year"""'}, {}), "('Default year')", True, 'from zeit.cms.i18n import MessageFactory as _\n'), ((15, 14, 15, 33), 'zeit.cms.i18n.MessageFactory', '_', ({(15, 16, 15, 32): '"""Default volume"""'}, {}), "('Default volume')", True, 'from zeit.cms.i18n import MessageFactory as _\n')]
c-yan/atcoder
abc/abc165/abc165e.py
940e49d576e6a2d734288fadaf368e486480a948
N, M = map(int, input().split())

for i in range(1, M + 1):
    if i % 2 == 1:
        j = (i - 1) // 2
        print(1 + j, M + 1 - j)
    else:
        j = (i - 2) // 2
        print(M + 2 + j, 2 * M + 1 - j)
[]
giggslam/python-messengerbot-sdk
setup.py
4a6fadf96fe3425da9abc4726fbb84db6d84f7b5
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import re
import sys

from setuptools import setup
from setuptools.command.test import test as TestCommand

__version__ = ''

with open('facebookbot/__about__.py', 'r') as fd:
    reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
    for line in fd:
        m = reg.match(line)
        if m:
            __version__ = m.group(1)
            break


def _requirements():
    with open('requirements.txt', 'r') as fd:
        return [name.strip() for name in fd.readlines()]


with open('README.rst', 'r') as fd:
    long_description = fd.read()


setup(
    name="fbsdk",
    version=__version__,
    author="Sam Chang",
    author_email="[email protected]",
    maintainer="Sam Chang",
    maintainer_email="[email protected]",
    url="https://github.com/boompieman/fbsdk",
    description="Facebook Messaging API SDK for Python",
    long_description=long_description,
    license='Apache License 2.0',
    packages=[
        "facebookbot",
        "facebookbot.models"
    ],
    install_requires=_requirements(),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development"
    ]
)
[((24, 10, 24, 58), 're.compile', 're.compile', ({(24, 21, 24, 57): '"""__version__ = [\\\\\'"]([^\\\\\'"]*)[\\\\\'"]"""'}, {}), '(\'__version__ = [\\\\\\\'"]([^\\\\\\\'"]*)[\\\\\\\'"]\')', False, 'import re\n')]
MaximovaIrina/transformers
src/transformers/models/mmbt/modeling_mmbt.py
033c3ed95a14b58f5a657f5124bc5988e4109c9f
# coding=utf-8 # Copyright (c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """PyTorch MMBT model. """ import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = "MMBTConfig" class ModalEmbeddings(nn.Module): """Generic Modal Embeddings which takes in an encoder, and a transformer embedding.""" def __init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r""" MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by Douwe Kiela, Suvrat Bhooshan, Hamed Firooz, Davide Testuggine. It's a supervised multimodal bitransformer model that fuses information from text and other image encoders, and obtain state-of-the-art performance on various multimodal classification benchmark tasks. This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads etc.) 
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters of the model. Initializing with a config file does not load the weights associated with the model, only the configuration. transformer (:class: *~nn.Module*): A text transformer that is used by MMBT. It should have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second modality. It should take in a batch of modal inputs and return k, n dimension embeddings. """ MMBT_INPUTS_DOCSTRING = r""" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data. It will be the shape that the encoder for that type expects. e.g. With an Image Encoder, the shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. It does not expect [CLS] token to be added as it's appended to the end of other modality embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to be added to Other Modality Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be added to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices to indicate different portions of the non-text modality. The embeddings from these tokens will be summed with the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the position embeddings for the non-text modality. Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. 
Mask values selected in `[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert `input_ids` indices into associated vectors than the model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple. """ @add_start_docstrings( "The bare MMBT Model outputting raw hidden-states without any specific head on top.", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r""" Returns: Examples:: # For example purposes. Not runnable. 
transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) """ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if input_ids is not None and inputs_embeds is not None: raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time") elif input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError("You have to specify either input_ids or inputs_embeds") device = input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( """ MMBT Model with a sequence classification/regression head on top (a linear layer on top of the pooled output) """, MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r""" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. 
If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration (config) and inputs: **loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each layer + the output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the output of each layer plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. Examples: ```python # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```""" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels is not None: if self.num_labels == 1: # We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )
[((41, 31, 41, 86), 'torch.nn.Linear', 'nn.Linear', ({(41, 41, 41, 65): 'config.modal_hidden_size', (41, 67, 41, 85): 'config.hidden_size'}, {}), '(config.modal_hidden_size, config.hidden_size)', False, 'from torch import nn\n'), ((46, 23, 46, 63), 'torch.nn.Dropout', 'nn.Dropout', (), '', False, 'from torch import nn\n'), ((252, 27, 252, 75), 'torch.cat', 'torch.cat', ({(252, 37, 252, 71): '[modal_embeddings, txt_embeddings]', (252, 73, 252, 74): '1'}, {}), '([modal_embeddings, txt_embeddings], 1)', False, 'import torch\n'), ((343, 23, 343, 61), 'torch.nn.Dropout', 'nn.Dropout', ({(343, 34, 343, 60): 'config.hidden_dropout_prob'}, {}), '(config.hidden_dropout_prob)', False, 'from torch import nn\n'), ((344, 26, 344, 74), 'torch.nn.Linear', 'nn.Linear', ({(344, 36, 344, 54): 'config.hidden_size', (344, 56, 344, 73): 'config.num_labels'}, {}), '(config.hidden_size, config.num_labels)', False, 'from torch import nn\n'), ((63, 27, 63, 96), 'torch.arange', 'torch.arange', (), '', False, 'import torch\n'), ((246, 29, 246, 89), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n'), ((257, 29, 257, 67), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n'), ((263, 37, 263, 75), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n'), ((388, 27, 388, 36), 'torch.nn.MSELoss', 'MSELoss', ({}, {}), '()', False, 'from torch.nn import CrossEntropyLoss, MSELoss\n'), ((391, 27, 391, 45), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ({}, {}), '()', False, 'from torch.nn import CrossEntropyLoss, MSELoss\n'), ((260, 17, 260, 79), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n'), ((266, 17, 266, 61), 'torch.ones', 'torch.ones', (), '', False, 'import torch\n')]
mhchia/trinity
eth2/beacon/chains/base.py
e40e475064ca4605887706e9b0e4f8e2349b10cd
from abc import ( ABC, abstractmethod, ) import logging from typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): """ The base class for all BeaconChain objects """ chaindb = None # type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): """ A Chain is a combination of one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range of slots. The Chain class acts as a wrapper around these other StateMachine classes, delegating operations to the appropriate StateMachine depending on the current block slot number. 
""" logger = logging.getLogger("eth2.beacon.chains.BeaconChain") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( "The Chain class cannot be instantiated with an empty `sm_configuration`" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError("`chaindb_class` not set") return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': """ Initialize the ``BeaconChain`` from a genesis state. """ sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( "Given genesis block class: {}, StateMachine.block_class: {}".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': """ Initialize the ``BeaconChain`` from the genesis block. """ chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: """ Returns the ``StateMachine`` instance for the given block slot number. """ return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: """ Return the ``StateMachine`` class for the given block slot number. """ if cls.sm_configuration is None: raise AttributeError("Chain classes must define the StateMachines in sm_configuration") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound("No StateMachine available for block slot: #{0}".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': """ Return the ``StateMachine`` instance for the given block number. """ block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: """ Passthrough helper to the ``StateMachine`` class of the block descending from the given block. """ return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: """ Return the requested block as specified by block hash. Raise ``BlockNotFound`` if there's no block with the given hash in the db. 
""" validate_word(block_root, title="Block Hash") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: """ Return the block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical chain. """ block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: """ Return the score of the block with the given hash. Raise ``BlockNotFound`` if there is no matching black hash. """ return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: """ Return ``block`` if it is not ``None``, otherwise return the block of the canonical head. """ if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: """ Return the current TIP block. """ return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: """ Return the block with the given number in the canonical chain. Raise ``BlockNotFound`` if there's no block with the given number in the canonical chain. """ validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: """ Return the block hash with the given number in the canonical chain. Raise ``BlockNotFound`` if there's no block with the given number in the canonical chain. """ return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: """ Import a complete block and returns a 3-tuple - the imported block - a tuple of blocks which are now part of the canonical chain. - a tuple of blocks which were canonical and now are no longer canonical. """ try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( "Attempt to import block #{}. Cannot import block {} before importing " "its parent block at {}".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state. Should design how to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block, new_canonical_blocks, old_canonical_blocks
[((169, 13, 169, 64), 'logging.getLogger', 'logging.getLogger', ({(169, 31, 169, 63): '"""eth2.beacon.chains.BeaconChain"""'}, {}), "('eth2.beacon.chains.BeaconChain')", False, 'import logging\n'), ((249, 8, 249, 27), 'eth2.beacon.validation.validate_slot', 'validate_slot', ({(249, 22, 249, 26): 'slot'}, {}), '(slot)', False, 'from eth2.beacon.validation import validate_slot\n'), ((293, 8, 293, 53), 'eth.validation.validate_word', 'validate_word', (), '', False, 'from eth.validation import validate_word\n'), ((341, 8, 341, 27), 'eth2.beacon.validation.validate_slot', 'validate_slot', ({(341, 22, 341, 26): 'slot'}, {}), '(slot)', False, 'from eth2.beacon.validation import validate_slot\n'), ((379, 12, 379, 29), 'eth2.beacon.typing.FromBlockParams', 'FromBlockParams', ({}, {}), '()', False, 'from eth2.beacon.typing import FromBlockParams, Slot\n'), ((385, 12, 385, 68), 'eth2._utils.ssz.validate_imported_block_unchanged', 'validate_imported_block_unchanged', ({(385, 46, 385, 60): 'imported_block', (385, 62, 385, 67): 'block'}, {}), '(imported_block, block)', False, 'from eth2._utils.ssz import validate_imported_block_unchanged\n'), ((398, 12, 398, 50), 'eth_utils.encode_hex', 'encode_hex', ({(398, 23, 398, 49): 'imported_block.signed_root'}, {}), '(imported_block.signed_root)', False, 'from eth_utils import ValidationError, encode_hex\n'), ((324, 55, 324, 72), 'eth2.beacon.typing.FromBlockParams', 'FromBlockParams', ({}, {}), '()', False, 'from eth2.beacon.typing import FromBlockParams, Slot\n')]
allupramodreddy/cisco_py
using_paramiko.py
5488b56d9324011860b78998e694dcce6da5e3d1
#!/usr/local/bin/python3 import paramiko,time #using as SSH Client client = paramiko.SSHClient() # check dir(client) to find available options. # auto adjust host key verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting to remote Cisco IOS """ Manually taking input addr = input('Provide IP address to connect to: ') user = input('Username: ') pwd = getpass.getpass('Password: ')""" # Taking input from files f1 = open("devices.txt","r") f2 = open("commands.txt","r") for line in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(" ") # print(data) addr = data[0] user = data[1] pwd = data[2] f3 = open(addr+".txt","w+") # print(addr +" "+ user +" " +pwd) client.connect(addr,username=user,password=pwd,allow_agent=False,look_for_keys=False) # we have to ask for Shell device_access = client.invoke_shell() for line in f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) """ THIS CODE IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW # send command to the device device_access.send("ter len 0\nshow run \n") time.sleep(2) # receive output from the device, convert it to byte-like format and print it print(device_access.recv(550000).decode('ascii')) # We can print the same to a file too with open("csr1000v.txt","w") as f: f.write(device_access.recv(550000).decode('ascii'))"""
[((7, 9, 7, 29), 'paramiko.SSHClient', 'paramiko.SSHClient', ({}, {}), '()', False, 'import paramiko, time\n'), ((11, 35, 11, 59), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ({}, {}), '()', False, 'import paramiko, time\n'), ((27, 13, 27, 33), 'paramiko.SSHClient', 'paramiko.SSHClient', ({}, {}), '()', False, 'import paramiko, time\n'), ((28, 39, 28, 63), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ({}, {}), '()', False, 'import paramiko, time\n'), ((48, 8, 48, 21), 'time.sleep', 'time.sleep', ({(48, 19, 48, 20): '(1)'}, {}), '(1)', False, 'import paramiko, time\n')]
pscly/bisai1
old/.history/a_20201125192943.py
e619186cec5053a8e02bd59e48fc3ad3af47d19a
# for n in range(400,500):
#     i = n // 100
#     j = n // 10 % 10
#     k = n % 10
#     if n == i ** 3 + j ** 3 + k ** 3:
#         print(n)


# Problem 1 (16)
# input("Enter the numbers (first time): ")
# s1 = input("Enter the numbers (second time): ")
# l1 = s1.split(' ')
# l2 = []
# for i in l1:
#     if i.isdigit():
#         l2.append(int(i))

# for i in l2:
#     if not (i % 6):
#         print(i, end=" ")


# Problem 2 (17)
out_l1 = []


def bian_int_list(l1):
    # Convert the incoming list of digit strings into the list of ints that is returned.
    re_l1 = []
    for i in l1:
        re_l1.append(int(i))
    return re_l1


def jisuan(nums, str_num):
    # Add up the squares of the numbers; record str_num once the running total exceeds it.
    he1 = 0
    global out_l1
    for i in nums:
        he1 += int(i) ** 2
        if he1 > int(str_num):
            out_l1.append(str_num)
            return None


while 1:
    in_1 = input("Enter numbers separated by spaces: ")
    nums_l1 = bian_int_list(in_1.split(' '))
[]
muggat0n/graphdb
graphdb/transformer.py
56dfd5ef8a3321abc6a919faee47494bbe059080
""" A query transformer is a function that accepts a program and returns a program, plus a priority level. Higher priority transformers are placed closer to the front of the list. We’re ensuring is a function, because we’re going to evaluate it later 31 . We’ll assume there won’t be an enormous number of transformer additions, and walk the list linearly to add a new one. We’ll leave a note in case this assumption turns out to be false — a binary search is much more time-optimal for long lists, but adds a little complexity and doesn’t really speed up short lists. """ class Transformer: def __init__(self): self.T = [] def transform(self, program): return program """ Dagoba.T = [] # transformers (more than meets the eye) """ """ Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer function') for(var i = 0; i < Dagoba.T.length; i++) # OPT: binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) } """ """ Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) } """ """ Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults || [] # default arguments for the alias Dagoba.addPipetype(newname, function() {}) # because there's no method catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these need to run early, so they get a high priority } """ """ Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined') return acc acc[key] = defaults[key] return acc }, list) } """
[]
lixuemin13/yz-core
yzcore/templates/project_template/src/const/_job.py
82774f807ac1002b77d0cc90f6695b1cc6ba0820
#!/usr/bin/python3.6.8+
# -*- coding:utf-8 -*-
"""
@auth: cml
@date: 2020-12-2
@desc: ...
"""


class JobStatus(object):
    PENDING = 0     # task is waiting to be executed
    STARTED = 100   # task execution has started
    PROCESS = 110
    POLLING = 120
    CALLBACK = 130
    SUCCESS = 200   # task executed successfully
    RETRY = 300     # task will be retried
    FAILURE = 400   # task execution failed
    REVOKED = 500   # task was revoked
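
# Illustrative usage sketch built on the assumed HTTP-like layout of the codes
# above (values below 200 mean "still running"); the helper names are not part
# of any established API.
def is_terminal(status):
    # SUCCESS, FAILURE and REVOKED end a job; RETRY (300) is expected to run again.
    return status in (JobStatus.SUCCESS, JobStatus.FAILURE, JobStatus.REVOKED)


def is_in_progress(status):
    # PENDING and the 1xx states mean the job has not produced a result yet.
    return status < JobStatus.SUCCESS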
[]
RenanPalmeira/pyboleto
pyboleto/html.py
7b12a7a2f7e92cad5f35f843ae67c397b6f7e36e
# -*- coding: utf-8 -*- """ pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer o output do boleto em html. :copyright: © 2012 by Artur Felipe de Sousa :license: BSD, see LICENSE for more details. """ import os import string import sys import codecs import base64 from itertools import chain if sys.version_info < (3,): from itertools import izip_longest as zip_longest zip_longest # chamando para evitar erro de nao uso do zip_longest else: from itertools import zip_longest DIGITS = [ ['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ] class BoletoHTML(object): """Geração do Boleto em HTML Esta classe é responsável por imprimir o boleto em HTML. Outras classes podem ser implementadas no futuro com a mesma interface, para fazer output em LaTeX, etc ... Esta classe pode imprimir boletos em formato de carnê (2 boletos por página) ou em formato de folha cheia. :param file_descr: Um arquivo ou *file-like* class. :param landscape: Formato da folha. Usar ``True`` para boleto tipo carnê. """ def __init__(self, file_descr, landscape=False): # Tamanhos em px self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue = 12 self.title = 'Boleto bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir, 'templates', template) with open(template_path, 'r') as tpl: template_content = tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media', logo_image) return image_path def _drawReciboSacado(self, boletoDados): """Imprime o Recibo do Sacado para modelo de página inteira :param boletoDados: Objeto com os dados do boleto a ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` """ tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] = '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): """Imprime o Recibo do Caixa :param boletoDados: Objeto com os dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` """ tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Instruções tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] += 
'<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): """Imprime um boleto tipo carnê com 2 boletos por página. :param boletoDados1: Objeto com os dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` """ raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): """Imprime Boleto Convencional Você pode chamar este método diversas vezes para criar um arquivo com várias páginas, uma por boleto. :param boletoDados: Objeto com os dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` """ self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): """Força início de nova página""" self.html += '</div><div class="pagina">' def save(self): """Fecha boleto e constroi o arquivo""" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat txt = txt.replace('.', ',') else: txt = "" return txt def _codigoBarraI25(self, code): """Imprime Código de barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 """ digits = ['n', 'n s', 'n', 'n s'] if len(code) % 2 != 0: code = '0' + code for digt1, digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result = [] for digit in digits: result.append('<span class="{0}"></span>'.format(digit)) return ''.join(result) def _grouper(self, n, iterable, fillvalue=None): """grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx""" args = [iter(iterable)] * n return zip_longest(fillvalue=fillvalue, *args)
[((76, 24, 76, 73), 'os.path.join', 'os.path.join', ({(76, 37, 76, 49): 'pyboleto_dir', (76, 51, 76, 62): '"""templates"""', (76, 64, 76, 72): 'template'}, {}), "(pyboleto_dir, 'templates', template)", False, 'import os\n'), ((83, 21, 83, 68), 'os.path.join', 'os.path.join', ({(83, 34, 83, 46): 'pyboleto_dir', (83, 48, 83, 55): '"""media"""', (83, 57, 83, 67): 'logo_image'}, {}), "(pyboleto_dir, 'media', logo_image)", False, 'import os\n'), ((286, 15, 286, 54), 'itertools.zip_longest', 'zip_longest', (), '', False, 'from itertools import zip_longest\n'), ((75, 39, 75, 64), 'os.path.abspath', 'os.path.abspath', ({(75, 55, 75, 63): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((82, 39, 82, 64), 'os.path.abspath', 'os.path.abspath', ({(82, 55, 82, 63): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((102, 18, 102, 39), 'base64.b64encode', 'base64.b64encode', ({(102, 35, 102, 38): 'aux'}, {}), '(aux)', False, 'import base64\n'), ((151, 18, 151, 39), 'base64.b64encode', 'base64.b64encode', ({(151, 35, 151, 38): 'aux'}, {}), '(aux)', False, 'import base64\n')]
emir-naiz/first_git_lesson
Courses/1 month/2 week/day 6/Formula.py
1fecf712290f6da3ef03deff518870d91638eb69
summary = 0 i = 0 while i < 5: summary = summary + i print(summary) i = i + 1
[]
Vicken-Ghoubiguian/Imtreat
tests/image_saver/image_saver_7.py
1f8e8406dc48af3b1e8e0c138a09aa1faee0b8a0
import imtreat img = imtreat.imageManagerClass.openImageFunction("../images/soleil.png", 0) img = imtreat.definedModesClass.detailEnhanceFunction(img) imtreat.imageManagerClass.saveImageFunction("/Téléchargements/", "image_1", ".png", img)
[((3, 6, 3, 76), 'imtreat.imageManagerClass.openImageFunction', 'imtreat.imageManagerClass.openImageFunction', ({(3, 50, 3, 72): '"""../images/soleil.png"""', (3, 74, 3, 75): '0'}, {}), "('../images/soleil.png', 0)", False, 'import imtreat\n'), ((5, 6, 5, 58), 'imtreat.definedModesClass.detailEnhanceFunction', 'imtreat.definedModesClass.detailEnhanceFunction', ({(5, 54, 5, 57): 'img'}, {}), '(img)', False, 'import imtreat\n'), ((7, 0, 7, 90), 'imtreat.imageManagerClass.saveImageFunction', 'imtreat.imageManagerClass.saveImageFunction', ({(7, 44, 7, 65): '"""/Téléchargements/"""', (7, 67, 7, 76): '"""image_1"""', (7, 78, 7, 84): '""".png"""', (7, 86, 7, 89): 'img'}, {}), "('/Téléchargements/', 'image_1',\n '.png', img)", False, 'import imtreat\n')]
raubvogel/nova
nova/conf/hyperv.py
b78be4e83cdc191e20a4a61b6aae72cb2b37f62b
# Copyright (c) 2016 TUBITAK BILGEM # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup("hyperv", title='The Hyper-V feature', help=""" The hyperv feature allows you to configure the Hyper-V hypervisor driver to be used within an OpenStack deployment. """) hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=""" Dynamic memory ratio Enables dynamic memory allocation (ballooning) when set to a value greater than 1. The value expresses the ratio between the total RAM assigned to an instance and its startup RAM amount. For example a ratio of 2.0 for an instance with 1024MB of RAM implies 512MB of RAM allocated at startup. Possible values: * 1.0: Disables dynamic memory allocation (Default). * Float values greater than 1.0: Enables allocation of total implied RAM divided by this value for startup. """), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=""" Enable instance metrics collection Enables metrics collections for an instance by using Hyper-V's metric APIs. Collected data can be retrieved by other apps and services, e.g.: Ceilometer. """), cfg.StrOpt('instances_path_share', default="", help=""" Instances path share The name of a Windows share mapped to the "instances_path" dir and used by the resize feature to copy files to the target host. If left blank, an administrative share (hidden network share) will be used, looking for the same "instances_path" used locally. Possible values: * "": An administrative share will be used (Default). * Name of a Windows share. Related options: * "instances_path": The directory which will be used if this option here is left blank. """), cfg.BoolOpt('limit_cpu_features', default=False, help=""" Limit CPU features This flag is needed to support live migration to hosts with different CPU features and checked during instance creation in order to limit the CPU features used by the instance. """), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=""" Mounted disk query retry count The number of times to retry checking for a mounted disk. The query runs until the device can be found or the retry count is reached. Possible values: * Positive integer values. Values greater than 1 is recommended (Default: 10). Related options: * Time interval between disk mount retries is declared with "mounted_disk_query_retry_interval" option. """), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=""" Mounted disk query retry interval Interval between checks for a mounted disk, in seconds. Possible values: * Time in seconds (Default: 5). Related options: * This option is meaningful when the mounted_disk_query_retry_count is greater than 1. * The retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. """), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=""" Power state check timeframe The timeframe to be checked for instance power state changes. 
This option is used to fetch the state of the instance from Hyper-V through the WMI interface, within the specified timeframe. Possible values: * Timeframe in seconds (Default: 60). """), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=""" Power state event polling interval Instance power state change event polling frequency. Sets the listener interval for power state events to the given value. This option enhances the internal lifecycle notifications of instances that reboot themselves. It is unlikely that an operator has to change this value. Possible values: * Time in seconds (Default: 2). """), cfg.StrOpt('qemu_img_cmd', default="qemu-img.exe", help=""" qemu-img command qemu-img is required for some of the image related operations like converting between different image types. You can get it from here: (http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path for this config option. You can either give the full path of qemu-img.exe or set its path in the PATH environment variable and leave this option to the default value. Possible values: * Name of the qemu-img executable, in case it is in the same directory as the nova-compute service or its path is in the PATH environment variable (Default). * Path of qemu-img command (DRIVELETTER:\PATH\TO\QEMU-IMG\COMMAND). Related options: * If the config_drive_cdrom option is False, qemu-img will be used to convert the ISO to a VHD, otherwise the config drive will remain an ISO. To use config drive with Hyper-V, you must set the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation. """), cfg.StrOpt('vswitch_name', help=""" External virtual switch name The Hyper-V Virtual Switch is a software-based layer-2 Ethernet network switch that is available with the installation of the Hyper-V server role. The switch includes programmatically managed and extensible capabilities to connect virtual machines to both virtual networks and the physical network. In addition, Hyper-V Virtual Switch provides policy enforcement for security, isolation, and service levels. The vSwitch represented by this config option must be an external one (not internal or private). Possible values: * If not provided, the first of a list of available vswitches is used. This list is queried using WQL. * Virtual switch name. """), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=""" Wait soft reboot seconds Number of seconds to wait for instance to shut down after soft reboot request is made. We fall back to hard reboot if instance does not shutdown within this window. Possible values: * Time in seconds (Default: 60). """), cfg.BoolOpt('config_drive_cdrom', default=False, help=""" Mount config drive as a CD drive. OpenStack can be configured to write instance metadata to a config drive, which is then attached to the instance before it boots. The config drive can be attached as a disk drive (default) or as a CD drive. Related options: * This option is meaningful with ``force_config_drive`` option set to ``True`` or when the REST API call to create an instance will have ``--config-drive=True`` flag. * ``config_drive_format`` option must be set to ``iso9660`` in order to use CD drive as the config drive image. * To use config drive with Hyper-V, you must set the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation. 
Additionally, you must set the ``qemu_img_cmd`` value to the full path to an ``qemu-img`` command installation. * You can configure the Compute service to always create a configuration drive by setting the ``force_config_drive`` option to ``True``. """), cfg.BoolOpt('config_drive_inject_password', default=False, help=""" Inject password to config drive. When enabled, the admin password will be available from the config drive image. Related options: * This option is meaningful when used with other options that enable config drive usage with Hyper-V, such as ``force_config_drive``. """), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=""" Volume attach retry count The number of times to retry attaching a volume. Volume attachment is retried until success or the given retry count is reached. Possible values: * Positive integer values (Default: 10). Related options: * Time interval between attachment attempts is declared with volume_attach_retry_interval option. """), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=""" Volume attach retry interval Interval between volume attachment attempts, in seconds. Possible values: * Time in seconds (Default: 5). Related options: * This options is meaningful when volume_attach_retry_count is greater than 1. * The retry loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration options. """), cfg.BoolOpt('enable_remotefx', default=False, help=""" Enable RemoteFX feature This requires at least one DirectX 11 capable graphics adapter for Windows / Hyper-V Server 2012 R2 or newer and RDS-Virtualization feature has to be enabled. Instances with RemoteFX can be requested with the following flavor extra specs: **os:resolution**. Guest VM screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only available on Windows / Hyper-V Server 2016. **os:monitors**. Guest VM number of monitors. Acceptable values:: [1, 4] - Windows / Hyper-V Server 2012 R2 [1, 8] - Windows / Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only available on Windows / Hyper-V Server 2016. Acceptable values:: 64, 128, 256, 512, 1024 """), cfg.BoolOpt('use_multipath_io', default=False, help=""" Use multipath connections when attaching iSCSI or FC disks. This requires the Multipath IO Windows feature to be enabled. MPIO must be configured to claim such devices. """), cfg.ListOpt('iscsi_initiator_list', default=[], help=""" List of iSCSI initiators that will be used for estabilishing iSCSI sessions. If none are specified, the Microsoft iSCSI initiator service will choose the initiator. """) ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts(): return {hyperv_opt_group: hyperv_opts}
[((18, 19, 23, 4), 'oslo_config.cfg.OptGroup', 'cfg.OptGroup', (), '', False, 'from oslo_config import cfg\n'), ((26, 4, 42, 4), 'oslo_config.cfg.FloatOpt', 'cfg.FloatOpt', (), '', False, 'from oslo_config import cfg\n'), ((43, 4, 51, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((52, 4, 71, 4), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((72, 4, 80, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((81, 4, 100, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((101, 4, 119, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((120, 4, 133, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((134, 4, 149, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((150, 4, 178, 4), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((179, 4, 197, 4), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (), '', False, 'from oslo_config import cfg\n'), ((198, 4, 211, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((212, 4, 234, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((235, 4, 246, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((247, 4, 264, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((265, 4, 283, 4), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (), '', False, 'from oslo_config import cfg\n'), ((284, 4, 311, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((312, 4, 319, 4), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (), '', False, 'from oslo_config import cfg\n'), ((320, 4, 327, 4), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (), '', False, 'from oslo_config import cfg\n')]
theyadev/thierry-bot
src/fetchWords.py
f3c72998d4c16afbca77baf4cabaf0f547d51e94
import requests

words_list = requests.get("https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt").text
words_list = filter(lambda x: len(x) > 4, words_list.split('\n'))

path = input("Chemin d'écriture ? (words.txt) ")

if path == "":
    path = "./words.txt"

with open(path, "w", encoding="utf-8") as file:
    file.write('\n'.join(words_list))
[((3, 13, 3, 96), 'requests.get', 'requests.get', ({(3, 26, 3, 95): '"""https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt"""'}, {}), "(\n 'https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt')", False, 'import requests\n')]
Zenahr/simple-music-gallery
inspiration/simplegallery/test/upload/variants/test_aws_uploader.py
2cf6e81208b721a91dcbf77e047c7f77182dd194
import unittest
from unittest import mock
import os
import subprocess

from testfixtures import TempDirectory

from simplegallery.upload.uploader_factory import get_uploader


class AWSUploaderTestCase(unittest.TestCase):

    def test_no_location(self):
        uploader = get_uploader('aws')
        self.assertFalse(uploader.check_location(''))

    @mock.patch('subprocess.run')
    def test_upload_gallery(self, subprocess_run):
        subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0)

        with TempDirectory() as tempdir:
            # Setup mock file and uploader
            tempdir.write('index.html', b'')
            gallery_path = os.path.join(tempdir.path, 'index.html')
            uploader = get_uploader('aws')

            # Test upload to bucket
            uploader.upload_gallery('s3://testbucket/path/', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])

            # Test upload to bucket without prefix
            uploader.upload_gallery('testbucket/path/', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])

            # Test upload to bucket without trailing /
            uploader.upload_gallery('s3://testbucket/path', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])


if __name__ == '__main__':
    unittest.main()
[((15, 5, 15, 33), 'unittest.mock.patch', 'mock.patch', ({(15, 16, 15, 32): '"""subprocess.run"""'}, {}), "('subprocess.run')", False, 'from unittest import mock\n'), ((42, 4, 42, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((12, 19, 12, 38), 'simplegallery.upload.uploader_factory.get_uploader', 'get_uploader', ({(12, 32, 12, 37): '"""aws"""'}, {}), "('aws')", False, 'from simplegallery.upload.uploader_factory import get_uploader\n'), ((17, 38, 17, 83), 'subprocess.CompletedProcess', 'subprocess.CompletedProcess', (), '', False, 'import subprocess\n'), ((19, 13, 19, 28), 'testfixtures.TempDirectory', 'TempDirectory', ({}, {}), '()', False, 'from testfixtures import TempDirectory\n'), ((22, 27, 22, 67), 'os.path.join', 'os.path.join', ({(22, 40, 22, 52): 'tempdir.path', (22, 54, 22, 66): '"""index.html"""'}, {}), "(tempdir.path, 'index.html')", False, 'import os\n'), ((23, 23, 23, 42), 'simplegallery.upload.uploader_factory.get_uploader', 'get_uploader', ({(23, 36, 23, 41): '"""aws"""'}, {}), "('aws')", False, 'from simplegallery.upload.uploader_factory import get_uploader\n')]
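The test in the record above patches ``subprocess.run`` and asserts on the exact command line that was issued. A minimal standalone sketch of that pattern follows; the ``sync_folder`` helper is hypothetical and only stands in for the uploader's shell-out:

import subprocess
from unittest import mock

def sync_folder(src, dest):
    # Hypothetical helper standing in for the uploader's shell-out.
    subprocess.run(['aws', 's3', 'sync', src, dest])

with mock.patch('subprocess.run') as fake_run:
    fake_run.return_value = subprocess.CompletedProcess([], returncode=0)
    sync_folder('/tmp/gallery', 's3://bucket/path/')
    fake_run.assert_called_with(['aws', 's3', 'sync', '/tmp/gallery', 's3://bucket/path/'])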
kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool
Qt_interface/add_subject.py
b7b38a7b7c6d0a2ad5264df32acd75cdef552bd0
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'add_subject.ui' # # Created by: PyQt4 UI code generator 4.11.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8("Dialog_add_subject")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/4zIr6y.jpg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8("lbl_subject_name")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8("label_add_subject")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8("lineEdit_subject_name")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8("label_year")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8("label_semester")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/Save-as.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8("pushButton_save")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() 
icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/if_draw-08_725558.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8("pushButton_cancel")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8("comboBox_year")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8("Times New Roman")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8("comboBox_semester")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8("clicked()")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate("Dialog_add_subject", "Dialog", None)) self.lbl_subject_name.setText(_translate("Dialog_add_subject", "SUBJECT NAME", None)) self.label_add_subject.setText(_translate("Dialog_add_subject", "ADD SUBJECT", None)) self.label_year.setText(_translate("Dialog_add_subject", "YEAR", None)) self.label_semester.setText(_translate("Dialog_add_subject", "SEMESTER", None)) self.pushButton_save.setText(_translate("Dialog_add_subject", "SAVE", None)) self.pushButton_cancel.setText(_translate("Dialog_add_subject", "CANCEL", None)) if __name__ == "__main__": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject) Dialog_add_subject.show() sys.exit(app.exec_())
[((126, 10, 126, 38), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', ({(126, 29, 126, 37): 'sys.argv'}, {}), '(sys.argv)', False, 'from PyQt4 import QtCore, QtGui\n'), ((127, 25, 127, 40), 'PyQt4.QtGui.QDialog', 'QtGui.QDialog', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((20, 15, 20, 79), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', ({(20, 44, 20, 51): 'context', (20, 53, 20, 57): 'text', (20, 59, 20, 67): 'disambig', (20, 69, 20, 78): '_encoding'}, {}), '(context, text, disambig, _encoding)', False, 'from PyQt4 import QtCore, QtGui\n'), ((29, 15, 29, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((34, 15, 34, 28), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((37, 32, 37, 64), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', ({(37, 45, 37, 63): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((39, 15, 39, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((44, 33, 44, 65), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', ({(44, 46, 44, 64): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((46, 15, 46, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((53, 37, 53, 72), 'PyQt4.QtGui.QLineEdit', 'QtGui.QLineEdit', ({(53, 53, 53, 71): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((55, 15, 55, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((60, 26, 60, 58), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', ({(60, 39, 60, 57): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((62, 15, 62, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((67, 30, 67, 62), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', ({(67, 43, 67, 61): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((69, 15, 69, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((74, 31, 74, 68), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ({(74, 49, 74, 67): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((76, 15, 76, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((80, 16, 80, 29), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((85, 33, 85, 70), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', ({(85, 51, 85, 69): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((87, 15, 87, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((90, 16, 90, 29), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((95, 29, 95, 64), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', ({(95, 45, 95, 63): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((97, 15, 97, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((102, 33, 102, 68), 'PyQt4.QtGui.QComboBox', 'QtGui.QComboBox', ({(102, 49, 102, 67): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 
'from PyQt4 import QtCore, QtGui\n'), ((104, 15, 104, 28), 'PyQt4.QtGui.QFont', 'QtGui.QFont', ({}, {}), '()', False, 'from PyQt4 import QtCore, QtGui\n'), ((112, 8, 112, 65), 'PyQt4.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', ({(112, 46, 112, 64): 'Dialog_add_subject'}, {}), '(Dialog_add_subject)', False, 'from PyQt4 import QtCore, QtGui\n'), ((23, 15, 23, 68), 'PyQt4.QtGui.QApplication.translate', 'QtGui.QApplication.translate', ({(23, 44, 23, 51): 'context', (23, 53, 23, 57): 'text', (23, 59, 23, 67): 'disambig'}, {}), '(context, text, disambig)', False, 'from PyQt4 import QtCore, QtGui\n'), ((38, 42, 38, 72), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(38, 55, 38, 57): '(50)', (38, 59, 38, 62): '(235)', (38, 64, 38, 67): '(131)', (38, 69, 38, 71): '(21)'}, {}), '(50, 235, 131, 21)', False, 'from PyQt4 import QtCore, QtGui\n'), ((45, 43, 45, 73), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(45, 56, 45, 59): '(220)', (45, 61, 45, 63): '(30)', (45, 65, 45, 68): '(151)', (45, 70, 45, 72): '(31)'}, {}), '(220, 30, 151, 31)', False, 'from PyQt4 import QtCore, QtGui\n'), ((54, 47, 54, 78), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(54, 60, 54, 63): '(190)', (54, 65, 54, 68): '(230)', (54, 70, 54, 73): '(321)', (54, 75, 54, 77): '(31)'}, {}), '(190, 230, 321, 31)', False, 'from PyQt4 import QtCore, QtGui\n'), ((61, 36, 61, 64), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(61, 49, 61, 51): '(50)', (61, 53, 61, 55): '(95)', (61, 57, 61, 59): '(81)', (61, 61, 61, 63): '(21)'}, {}), '(50, 95, 81, 21)', False, 'from PyQt4 import QtCore, QtGui\n'), ((68, 40, 68, 69), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(68, 53, 68, 55): '(50)', (68, 57, 68, 60): '(165)', (68, 62, 68, 64): '(91)', (68, 66, 68, 68): '(21)'}, {}), '(50, 165, 91, 21)', False, 'from PyQt4 import QtCore, QtGui\n'), ((75, 41, 75, 72), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(75, 54, 75, 57): '(190)', (75, 59, 75, 62): '(290)', (75, 64, 75, 67): '(111)', (75, 69, 75, 71): '(31)'}, {}), '(190, 290, 111, 31)', False, 'from PyQt4 import QtCore, QtGui\n'), ((83, 41, 83, 61), 'PyQt4.QtCore.QSize', 'QtCore.QSize', ({(83, 54, 83, 56): '(20)', (83, 58, 83, 60): '(20)'}, {}), '(20, 20)', False, 'from PyQt4 import QtCore, QtGui\n'), ((86, 43, 86, 74), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(86, 56, 86, 59): '(340)', (86, 61, 86, 64): '(290)', (86, 66, 86, 69): '(111)', (86, 71, 86, 73): '(31)'}, {}), '(340, 290, 111, 31)', False, 'from PyQt4 import QtCore, QtGui\n'), ((93, 43, 93, 63), 'PyQt4.QtCore.QSize', 'QtCore.QSize', ({(93, 56, 93, 58): '(20)', (93, 60, 93, 62): '(20)'}, {}), '(20, 20)', False, 'from PyQt4 import QtCore, QtGui\n'), ((96, 39, 96, 69), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(96, 52, 96, 55): '(190)', (96, 57, 96, 59): '(91)', (96, 61, 96, 64): '(111)', (96, 66, 96, 68): '(31)'}, {}), '(190, 91, 111, 31)', False, 'from PyQt4 import QtCore, QtGui\n'), ((103, 43, 103, 74), 'PyQt4.QtCore.QRect', 'QtCore.QRect', ({(103, 56, 103, 59): '(190)', (103, 61, 103, 64): '(160)', (103, 66, 103, 69): '(111)', (103, 71, 103, 73): '(31)'}, {}), '(190, 160, 111, 31)', False, 'from PyQt4 import QtCore, QtGui\n')]
mdj2/django
tests/syncdb_signals/tests.py
e71b63e280559122371d125d75a593dc2435c394
from django.db.models import signals
from django.test import TestCase
from django.core import management
from django.utils import six

from shared_models import models


PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db']
SYNCDB_DATABASE = 'default'
SYNCDB_VERBOSITY = 1
SYNCDB_INTERACTIVE = False


class PreSyncdbReceiver(object):
    def __init__(self):
        self.call_counter = 0
        self.call_args = None

    def __call__(self, signal, sender, **kwargs):
        self.call_counter = self.call_counter + 1
        self.call_args = kwargs


class OneTimeReceiver(object):
    """
    Special receiver that handles the fact that the test runner calls syncdb
    for several databases, and several times for some of them.
    """

    def __init__(self):
        self.call_counter = 0
        self.call_args = None

    def __call__(self, signal, sender, **kwargs):
        # Although the test runner calls syncdb for several databases,
        # testing only one of them is quite sufficient.
        if kwargs['db'] == SYNCDB_DATABASE:
            self.call_counter = self.call_counter + 1
            self.call_args = kwargs
            # we need to test only one call of syncdb
            signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models)


# We connect the receiver here and not in unit test code because we need to
# connect it before the test runner creates the database. That is, the
# sequence of actions is:
#
# 1. Test runner imports this module.
# 2. We connect the receiver.
# 3. Test runner calls syncdb to create the default database.
# 4. Test runner executes our unit test code.
pre_syncdb_receiver = OneTimeReceiver()
signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models)


class SyncdbSignalTests(TestCase):
    def test_pre_syncdb_call_time(self):
        self.assertEqual(pre_syncdb_receiver.call_counter, 1)

    def test_pre_syncdb_args(self):
        r = PreSyncdbReceiver()
        signals.pre_syncdb.connect(r, sender=models)
        management.call_command('syncdb', database=SYNCDB_DATABASE,
                                verbosity=SYNCDB_VERBOSITY,
                                interactive=SYNCDB_INTERACTIVE,
                                load_initial_data=False,
                                stdout=six.StringIO())

        args = r.call_args
        self.assertEqual(r.call_counter, 1)
        self.assertEqual(set(args), set(PRE_SYNCDB_ARGS))
        self.assertEqual(args['app'], models)
        self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY)
        self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE)
        self.assertEqual(args['db'], 'default')
[((54, 0, 54, 62), 'django.db.models.signals.pre_syncdb.connect', 'signals.pre_syncdb.connect', (), '', False, 'from django.db.models import signals\n'), ((63, 8, 63, 52), 'django.db.models.signals.pre_syncdb.connect', 'signals.pre_syncdb.connect', (), '', False, 'from django.db.models import signals\n'), ((42, 12, 42, 77), 'django.db.models.signals.pre_syncdb.disconnect', 'signals.pre_syncdb.disconnect', (), '', False, 'from django.db.models import signals\n'), ((66, 44, 66, 58), 'django.utils.six.StringIO', 'six.StringIO', ({}, {}), '()', False, 'from django.utils import six\n')]
gianscarpe/pytorch-lightning
pytorch_lightning/plugins/environments/slurm_environment.py
261ea90822e2bf1cfa5d56171ab1f95a81d5c571
# Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): """Cluster environment for training on a cluster managed by SLURM.""" @property def creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool: """Returns ``True`` if the current process was launched on a SLURM cluster.""" return "SLURM_NTASKS" in os.environ @property def main_address(self) -> str: # figure out the root node addr slurm_nodelist = os.environ.get("SLURM_NODELIST") if slurm_nodelist: root_node = slurm_nodelist.split(" ")[0].split(",")[0] else: root_node = "127.0.0.1" root_node = self.resolve_root_node_address(root_node) os.environ["MASTER_ADDR"] = root_node log.debug(f"MASTER_ADDR: {os.environ['MASTER_ADDR']}") return root_node @property def main_port(self) -> int: # ----------------------- # SLURM JOB = PORT number # ----------------------- # this way every process knows what port to use default_port = os.environ.get("SLURM_JOB_ID") if default_port: # use the last 4 numbers in the job id as the id default_port = default_port[-4:] # all ports should be in the 10k+ range default_port = int(default_port) + 15000 else: default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in case the user passed it in if "MASTER_PORT" in os.environ: default_port = os.environ["MASTER_PORT"] else: os.environ["MASTER_PORT"] = str(default_port) log.debug(f"MASTER_PORT: {os.environ['MASTER_PORT']}") return int(default_port) def world_size(self) -> int: return int(os.environ["SLURM_NTASKS"]) def set_world_size(self, size: int) -> None: log.debug("SLURMEnvironment.set_world_size was called, but setting world size is not allowed. Ignored.") def global_rank(self) -> int: return int(os.environ["SLURM_PROCID"]) def set_global_rank(self, rank: int) -> None: log.debug("SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed. Ignored.") def local_rank(self) -> int: return int(os.environ["SLURM_LOCALID"]) def node_rank(self) -> int: return int(os.environ["SLURM_NODEID"]) def resolve_root_node_address(self, root_node: str) -> str: if "[" in root_node: name, numbers = root_node.split("[", maxsplit=1) number = numbers.split(",", maxsplit=1)[0] if "-" in number: number = number.split("-")[0] number = re.sub("[^0-9]", "", number) root_node = name + number return root_node
[((21, 6, 21, 33), 'logging.getLogger', 'logging.getLogger', ({(21, 24, 21, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((39, 25, 39, 57), 'os.environ.get', 'os.environ.get', ({(39, 40, 39, 56): '"""SLURM_NODELIST"""'}, {}), "('SLURM_NODELIST')", False, 'import os\n'), ((56, 23, 56, 53), 'os.environ.get', 'os.environ.get', ({(56, 38, 56, 52): '"""SLURM_JOB_ID"""'}, {}), "('SLURM_JOB_ID')", False, 'import os\n'), ((103, 21, 103, 49), 're.sub', 're.sub', ({(103, 28, 103, 36): '"""[^0-9]"""', (103, 38, 103, 40): '""""""', (103, 42, 103, 48): 'number'}, {}), "('[^0-9]', '', number)", False, 'import re\n')]
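The SLURM environment class in the record above derives the rendezvous port from ``SLURM_JOB_ID`` and the root node from ``SLURM_NODELIST``. A small standalone sketch of that derivation follows; the environment values are made up, and the node-name parsing is simplified (it skips the final ``re.sub`` digit cleanup):

import os

# Pretend we are inside a SLURM job; these values are illustrative.
os.environ.setdefault("SLURM_JOB_ID", "1234567")
os.environ.setdefault("SLURM_NODELIST", "node[042-044,050]")

# Port derivation as in main_port: last 4 digits of the job id + 15000.
main_port = int(os.environ["SLURM_JOB_ID"][-4:]) + 15000  # -> 19567

# Root node resolution as in resolve_root_node_address: "node[042-..." -> "node042".
nodelist = os.environ["SLURM_NODELIST"]
name, numbers = nodelist.split("[", maxsplit=1)
number = numbers.split(",", maxsplit=1)[0].split("-")[0]
root_node = name + number  # -> "node042"
print(main_port, root_node)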
ginkage/trackball-python
examples/mouse.py
06439ac77935f7fd9374bd4f535822e859734729
#!/usr/bin/env python
import time
import os
import math

from trackball import TrackBall

print("""Trackball: Mouse

Use the trackball as a mouse in Raspbian, with right-click
when the switch is pressed.

Press Ctrl+C to exit!
""")

trackball = TrackBall(interrupt_pin=4)
trackball.set_rgbw(0, 0, 0, 0)

# Check for xte (used to control mouse)
use_xte = os.system('which xte') == 0

if not use_xte:
    raise RuntimeError("xte not found. Did you sudo apt install xautomation?")

while True:
    up, down, left, right, switch, state = trackball.read()

    # Send movements and clicks to xte
    if switch:
        cmd = 'xte "mouseclick 1"'
        os.system(cmd)
    elif right or up or left or down:
        x = right - left
        x = math.copysign(x**2, x)
        y = down - up
        y = math.copysign(y**2, y)
        cmd = 'xte "mousermove {} {}"'.format(int(x), int(y))
        os.system(cmd)

    time.sleep(0.0001)
[((15, 12, 15, 38), 'trackball.TrackBall', 'TrackBall', (), '', False, 'from trackball import TrackBall\n'), ((19, 10, 19, 32), 'os.system', 'os.system', ({(19, 20, 19, 31): '"""which xte"""'}, {}), "('which xte')", False, 'import os\n'), ((39, 4, 39, 22), 'time.sleep', 'time.sleep', ({(39, 15, 39, 21): '(0.0001)'}, {}), '(0.0001)', False, 'import time\n'), ((30, 8, 30, 22), 'os.system', 'os.system', ({(30, 18, 30, 21): 'cmd'}, {}), '(cmd)', False, 'import os\n'), ((33, 12, 33, 34), 'math.copysign', 'math.copysign', ({(33, 26, 33, 30): 'x ** 2', (33, 32, 33, 33): 'x'}, {}), '(x ** 2, x)', False, 'import math\n'), ((35, 12, 35, 34), 'math.copysign', 'math.copysign', ({(35, 26, 35, 30): 'y ** 2', (35, 32, 35, 33): 'y'}, {}), '(y ** 2, y)', False, 'import math\n'), ((37, 8, 37, 22), 'os.system', 'os.system', ({(37, 18, 37, 21): 'cmd'}, {}), '(cmd)', False, 'import os\n')]
artberryx/LSD
garaged/src/garage/tf/regressors/gaussian_mlp_regressor_model.py
99ee081de2502b4d13c140b474f772db8a5f92fe
"""GaussianMLPRegressorModel.""" import numpy as np import tensorflow as tf import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): """GaussianMLPRegressor based on garage.tf.models.Model class. This class can be used to perform regression by fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape of the training data. output_dim (int): Output dimension of the model. name (str): Model name, also the variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for mean. For example, (32, 32) means the MLP consists of two hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It should return a tf.Tensor. Set it to None to maintain a linear activation. hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s). The function should return a tf.Tensor. hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s). The function should return a tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It should return a tf.Tensor. Set it to None to maintain a linear activation. output_w_init (callable): Initializer function for the weight of output dense layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer function for the bias of output dense layer(s). The function should return a tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value for std. adaptive_std (bool): Is std a neural network. If False, it will be a parameter. std_share_network (bool): Boolean for whether mean and std share the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for std. For example, (32, 32) means the MLP consists of two hidden layers, each with 32 hidden units. min_std (float): If not None, the std is at least the value of min_std, to avoid numerical issues. max_std (float): If not None, the std is at most the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std network. std_hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s) in the std network. std_hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function for output dense layer in the std network. It should return a tf.Tensor. Set it to None to maintain a linear activation. std_output_w_init (callable): Initializer function for the weight of output dense layer(s) in the std network. std_parameterization (str): How the std should be parametrized. There are two options: - exp: the logarithm of the std will be stored, and applied a exponential transformation - softplus: the std will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not. 
""" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): """Network output spec. Return: list[str]: List of key(str) for the network outputs. """ return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): """Build model given input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input. name (str): Inner model name, also the variable scope of the inner model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for label. 
""" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): """Return a clone of the model. It copies the configuration and parameters of the primitive. Args: name (str): Name of the newly created model. It has to be different from source model if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. """ new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters return new_regressor
[((82, 31, 82, 53), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((86, 31, 86, 53), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((97, 35, 97, 57), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((192, 34, 192, 72), 'tensorflow.squeeze', 'tf.squeeze', ({(192, 45, 192, 68): 'normalized_dist_log_std', (192, 70, 192, 71): '1'}, {}), '(normalized_dist_log_std, 1)', True, 'import tensorflow as tf\n'), ((159, 13, 159, 59), 'tensorflow.compat.v1.variable_scope', 'tf.compat.v1.variable_scope', ({(159, 41, 159, 58): '"""normalized_vars"""'}, {}), "('normalized_vars')", True, 'import tensorflow as tf\n'), ((194, 13, 194, 42), 'tensorflow.name_scope', 'tf.name_scope', ({(194, 27, 194, 41): '"""mean_network"""'}, {}), "('mean_network')", True, 'import tensorflow as tf\n'), ((197, 13, 197, 41), 'tensorflow.name_scope', 'tf.name_scope', ({(197, 27, 197, 40): '"""std_network"""'}, {}), "('std_network')", True, 'import tensorflow as tf\n'), ((81, 26, 81, 60), 'garage.experiment.deterministic.get_tf_seed_stream', 'deterministic.get_tf_seed_stream', ({}, {}), '()', False, 'from garage.experiment import deterministic\n'), ((85, 26, 85, 60), 'garage.experiment.deterministic.get_tf_seed_stream', 'deterministic.get_tf_seed_stream', ({}, {}), '()', False, 'from garage.experiment import deterministic\n'), ((96, 26, 96, 60), 'garage.experiment.deterministic.get_tf_seed_stream', 'deterministic.get_tf_seed_stream', ({}, {}), '()', False, 'from garage.experiment import deterministic\n'), ((100, 26, 100, 60), 'garage.experiment.deterministic.get_tf_seed_stream', 'deterministic.get_tf_seed_stream', ({}, {}), '()', False, 'from garage.experiment import deterministic\n'), ((198, 53, 198, 75), 'tensorflow.math.log', 'tf.math.log', ({(198, 65, 198, 74): 'y_std_var'}, {}), '(y_std_var)', True, 'import tensorflow as tf\n'), ((202, 23, 202, 54), 'tensorflow.exp', 'tf.exp', ({(202, 30, 202, 53): 'normalized_dist_log_std'}, {}), '(normalized_dist_log_std)', True, 'import tensorflow as tf\n'), ((205, 38, 205, 58), 'tensorflow.exp', 'tf.exp', ({(205, 45, 205, 57): 'log_stds_var'}, {}), '(log_stds_var)', True, 'import tensorflow as tf\n'), ((164, 28, 164, 50), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((170, 28, 170, 49), 'tensorflow.ones_initializer', 'tf.ones_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((176, 28, 176, 50), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((182, 28, 182, 49), 'tensorflow.ones_initializer', 'tf.ones_initializer', ({}, {}), '()', True, 'import tensorflow as tf\n')]
kim-sunghoon/DiracDeltaNet
test.py
7bcc0575f28715d9c7f737f8a239718320f9c05b
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn

import torchvision
import torchvision.transforms as transforms
import torchvision.datasets as datasets

import os
import argparse

from torch.autograd import Variable
from extensions.utils import progress_bar
from extensions.model_refinery_wrapper import ModelRefineryWrapper
from extensions.refinery_loss import RefineryLoss
from models import ShuffleNetv2_wrapper
from models import DiracDeltaNet_wrapper


parser = argparse.ArgumentParser(description='PyTorch imagenet inference')
parser.add_argument('--datadir', help='path to dataset')
parser.add_argument('--inputdir', help='path to input model')
args = parser.parse_args()

# Data
print('==> Preparing data..')
# Data loading code
valdir = os.path.join(args.datadir, 'val')
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])

transform_test = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    normalize,
])

# imagenet
testset = datasets.ImageFolder(valdir, transform_test)
num_classes = 1000

testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30)

use_cuda = torch.cuda.is_available()

print('Using input path: %s' % args.inputdir)
checkpoint = torch.load(args.inputdir)
init_net = checkpoint['net']
net = init_net.to('cpu')

label_refinery = torch.load('./resnet50.t7')
net = ModelRefineryWrapper(net, label_refinery)

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(device)

if torch.cuda.device_count() > 1:
    print("Let's use", torch.cuda.device_count(), "GPUs!")
    net = nn.DataParallel(net)

net = net.to(device)

criterion = RefineryLoss()


def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k"""
    with torch.no_grad():
        maxk = max(topk)
        batch_size = target.size(0)

        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))

        res = []
        for k in topk:
            correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
            res.append(correct_k)
        return res


def test():
    net.eval()
    criterion.eval()
    test_loss = 0
    correct_1 = 0
    correct_5 = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(testloader):
        if use_cuda:
            inputs, targets = inputs.cuda(device), targets.cuda(device)

        with torch.no_grad():
            outputs = net(inputs)
            loss = criterion(outputs, targets)

        if isinstance(loss, tuple):
            loss_value, outputs = loss
        else:
            loss_value = loss

        test_loss += loss_value.item()
        prec1, prec5 = accuracy(outputs, targets, topk=(1, 5))

        total += targets.size(0)
        correct_1 += prec1
        correct_5 += prec5

        progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)'
                     % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total))

    return 100.*float(correct_1)/float(total), 100.*float(correct_5)/float(total), test_loss


acc1, acc5, loss = test()
print('top-1 accuracy: {0:.3f}%, top-5 accuracy: {1:.3f}%'.format(acc1, acc5))
[((23, 9, 23, 74), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (), '', False, 'import argparse\n'), ((32, 9, 32, 42), 'os.path.join', 'os.path.join', ({(32, 22, 32, 34): 'args.datadir', (32, 36, 32, 41): '"""val"""'}, {}), "(args.datadir, 'val')", False, 'import os\n'), ((34, 12, 35, 59), 'torchvision.transforms.Normalize', 'transforms.Normalize', (), '', True, 'import torchvision.transforms as transforms\n'), ((46, 10, 46, 54), 'torchvision.datasets.ImageFolder', 'datasets.ImageFolder', ({(46, 31, 46, 37): 'valdir', (46, 39, 46, 53): 'transform_test'}, {}), '(valdir, transform_test)', True, 'import torchvision.datasets as datasets\n'), ((49, 13, 49, 114), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (), '', False, 'import torch\n'), ((52, 11, 52, 36), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((55, 13, 55, 38), 'torch.load', 'torch.load', ({(55, 24, 55, 37): 'args.inputdir'}, {}), '(args.inputdir)', False, 'import torch\n'), ((59, 15, 59, 42), 'torch.load', 'torch.load', ({(59, 26, 59, 41): '"""./resnet50.t7"""'}, {}), "('./resnet50.t7')", False, 'import torch\n'), ((60, 6, 60, 47), 'extensions.model_refinery_wrapper.ModelRefineryWrapper', 'ModelRefineryWrapper', ({(60, 27, 60, 30): 'net', (60, 32, 60, 46): 'label_refinery'}, {}), '(net, label_refinery)', False, 'from extensions.model_refinery_wrapper import ModelRefineryWrapper\n'), ((71, 12, 71, 26), 'extensions.refinery_loss.RefineryLoss', 'RefineryLoss', ({}, {}), '()', False, 'from extensions.refinery_loss import RefineryLoss\n'), ((65, 3, 65, 28), 'torch.cuda.device_count', 'torch.cuda.device_count', ({}, {}), '()', False, 'import torch\n'), ((67, 10, 67, 30), 'torch.nn.DataParallel', 'nn.DataParallel', ({(67, 26, 67, 29): 'net'}, {}), '(net)', True, 'import torch.nn as nn\n'), ((39, 12, 39, 34), 'torchvision.transforms.Resize', 'transforms.Resize', ({(39, 30, 39, 33): '256'}, {}), '(256)', True, 'import torchvision.transforms as transforms\n'), ((40, 12, 40, 38), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', ({(40, 34, 40, 37): '224'}, {}), '(224)', True, 'import torchvision.transforms as transforms\n'), ((41, 12, 41, 33), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ({}, {}), '()', True, 'import torchvision.transforms as transforms\n'), ((62, 34, 62, 59), 'torch.cuda.is_available', 'torch.cuda.is_available', ({}, {}), '()', False, 'import torch\n'), ((66, 23, 66, 48), 'torch.cuda.device_count', 'torch.cuda.device_count', ({}, {}), '()', False, 'import torch\n'), ((77, 9, 77, 24), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n'), ((102, 13, 102, 28), 'torch.no_grad', 'torch.no_grad', ({}, {}), '()', False, 'import torch\n')]
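The ``accuracy`` helper in the record above computes top-k precision with ``topk``/``eq``. A self-contained sketch of the same computation on random tensors (shapes and values are illustrative only):

import torch

# Illustrative tensors standing in for model outputs and labels.
output = torch.randn(8, 1000)          # logits for 8 samples over 1000 classes
target = torch.randint(0, 1000, (8,))  # ground-truth class indices

maxk = 5
_, pred = output.topk(maxk, 1, True, True)   # top-5 class indices per sample
pred = pred.t()                              # shape (5, 8)
correct = pred.eq(target.view(1, -1).expand_as(pred))

top1 = correct[:1].reshape(-1).float().sum(0)  # number of top-1 hits
top5 = correct[:5].reshape(-1).float().sum(0)  # number of top-5 hits
print(top1.item(), top5.item())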
PaccMann/paccmann_chemistry
paccmann_chemistry/utils/hyperparams.py
f7e9735aafb936f837c38b5055c654be178f385f
"""Model Parameters Module.""" import torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop': optim.RMSprop, 'sgd': optim.SGD }
[]
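The module above only exposes string-to-class factories; picking a search strategy or optimizer is a plain dictionary lookup. A minimal usage sketch follows (the parameter list is a stand-in, not taken from the repository):

import torch
import torch.optim as optim

OPTIMIZER_FACTORY = {'adam': optim.Adam, 'sgd': optim.SGD}

params = [torch.nn.Parameter(torch.zeros(3))]  # stand-in parameter list
optimizer = OPTIMIZER_FACTORY['adam'](params, lr=1e-3)
print(type(optimizer).__name__)  # -> Adam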
francescodonato/GPflux
tests/gpflux/layers/test_latent_variable_layer.py
fe45b353243b31d9fa0ec0daeb1d39a2e78ba094
# # Copyright (c) 2021 The GPflux Contributors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import abc import numpy as np import pytest import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx("float64") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): """ N(0, I) prior """ return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize("distribution, w_dim", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors == priors posterior = distribution assert lv._local_kls(posterior) == 0 # test kl > 0 when posteriors != priors batch_size = 10 params = distribution.parameters posterior_params = { k: [v + 0.5 for _ in range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize("w_dim", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters["loc"] q_sqrt = posteriors.parameters["scale_diag"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize("w_dim", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, 
prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize("w_dim", [1, 5]) @pytest.mark.parametrize("seed2", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): """ LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does not have a metaclass, and hence by adding the ABCMeta to LayerWithObservations we are not accidentally removing some required TensorFlow magic metaclass. """ assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type assert type(LayerWithObservations) is abc.ABCMeta
[((29, 0, 29, 38), 'tensorflow.keras.backend.set_floatx', 'tf.keras.backend.set_floatx', ({(29, 28, 29, 37): '"""float64"""'}, {}), "('float64')", True, 'import tensorflow as tf\n'), ((84, 1, 84, 41), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(84, 25, 84, 32): '"""w_dim"""', (84, 34, 84, 40): '[1, 5]'}, {}), "('w_dim', [1, 5])", False, 'import pytest\n'), ((112, 1, 112, 41), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(112, 25, 112, 32): '"""w_dim"""', (112, 34, 112, 40): '[1, 5]'}, {}), "('w_dim', [1, 5])", False, 'import pytest\n'), ((150, 1, 150, 41), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(150, 25, 150, 32): '"""w_dim"""', (150, 34, 150, 40): '[1, 5]'}, {}), "('w_dim', [1, 5])", False, 'import pytest\n'), ((151, 1, 151, 45), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(151, 25, 151, 32): '"""seed2"""', (151, 34, 151, 44): '[None, 42]'}, {}), "('seed2', [None, 42])", False, 'import pytest\n'), ((64, 9, 64, 62), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', (), '', False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((80, 11, 80, 32), 'numpy.all', 'np.all', ({(80, 18, 80, 31): '(local_kls > 0)'}, {}), '(local_kls > 0)', True, 'import numpy as np\n'), ((87, 12, 87, 44), 'numpy.random.randn', 'np.random.randn', ({(87, 28, 87, 36): 'num_data', (87, 38, 87, 43): 'w_dim'}, {}), '(num_data, w_dim)', True, 'import numpy as np\n'), ((88, 14, 88, 69), 'gpflux.encoders.DirectlyParameterizedNormalDiag', 'DirectlyParameterizedNormalDiag', ({(88, 46, 88, 54): 'num_data', (88, 56, 88, 61): 'w_dim', (88, 63, 88, 68): 'means'}, {}), '(num_data, w_dim, means)', False, 'from gpflux.encoders import DirectlyParameterizedNormalDiag\n'), ((98, 23, 98, 45), 'gpflow.kullback_leiblers.gauss_kl', 'gauss_kl', ({(98, 32, 98, 36): 'q_mu', (98, 38, 98, 44): 'q_sqrt'}, {}), '(q_mu, q_sqrt)', False, 'from gpflow.kullback_leiblers import gauss_kl\n'), ((101, 4, 101, 75), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (), '', True, 'import numpy as np\n'), ((130, 9, 130, 58), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', (), '', False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((132, 13, 132, 47), 'numpy.full', 'np.full', ({(132, 21, 132, 38): '(num_data, x_dim)', (132, 40, 132, 46): 'np.nan'}, {}), '((num_data, x_dim), np.nan)', True, 'import numpy as np\n'), ((133, 14, 133, 48), 'numpy.full', 'np.full', ({(133, 22, 133, 39): '(num_data, y_dim)', (133, 41, 133, 47): 'np.nan'}, {}), '((num_data, y_dim), np.nan)', True, 'import numpy as np\n'), ((135, 21, 135, 58), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n'), ((147, 4, 147, 53), 'numpy.testing.assert_equal', 'np.testing.assert_equal', ({(147, 28, 147, 37): 'lv.losses', (147, 39, 147, 52): 'expected_loss'}, {}), '(lv.losses, expected_loss)', True, 'import numpy as np\n'), ((172, 9, 172, 58), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', (), '', False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((174, 4, 174, 28), 'tensorflow.random.set_seed', 'tf.random.set_seed', ({(174, 23, 174, 27): 'seed'}, {}), '(seed)', True, 'import tensorflow as tf\n'), ((176, 4, 176, 28), 'tensorflow.random.set_seed', 'tf.random.set_seed', ({(176, 23, 176, 27): 'seed'}, {}), '(seed)', True, 'import tensorflow as tf\n'), ((178, 4, 178, 63), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', 
({(178, 34, 178, 46): 'sample_prior', (178, 48, 178, 62): 'prior_expected'}, {}), '(sample_prior, prior_expected)', True, 'import numpy as np\n'), ((180, 4, 180, 28), 'tensorflow.random.set_seed', 'tf.random.set_seed', ({(180, 23, 180, 27): 'seed'}, {}), '(seed)', True, 'import tensorflow as tf\n'), ((182, 4, 182, 28), 'tensorflow.random.set_seed', 'tf.random.set_seed', ({(182, 23, 182, 27): 'seed'}, {}), '(seed)', True, 'import tensorflow as tf\n'), ((184, 4, 184, 71), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', ({(184, 34, 184, 50): 'sample_posterior', (184, 52, 184, 70): 'posterior_expected'}, {}), '(sample_posterior, posterior_expected)', True, 'import numpy as np\n'), ((45, 15, 45, 26), 'numpy.zeros', 'np.zeros', ({(45, 24, 45, 25): 'd'}, {}), '(d)', True, 'import numpy as np\n'), ((46, 22, 46, 31), 'numpy.eye', 'np.eye', ({(46, 29, 46, 30): 'd'}, {}), '(d)', True, 'import numpy as np\n'), ((47, 14, 47, 73), 'tensorflow_probability.distributions.MultivariateNormalTriL', 'tfp.distributions.MultivariateNormalTriL', ({(47, 55, 47, 59): 'mean', (47, 61, 47, 72): 'scale_tri_l'}, {}), '(mean, scale_tri_l)', True, 'import tensorflow_probability as tfp\n'), ((49, 14, 49, 24), 'numpy.ones', 'np.ones', ({(49, 22, 49, 23): 'd'}, {}), '(d)', True, 'import numpy as np\n'), ((50, 19, 50, 70), 'tensorflow_probability.distributions.MultivariateNormalDiag', 'tfp.distributions.MultivariateNormalDiag', ({(50, 60, 50, 64): 'mean', (50, 66, 50, 69): 'std'}, {}), '(mean, std)', True, 'import tensorflow_probability as tfp\n'), ((109, 15, 109, 65), 'numpy.allclose', 'np.allclose', (), '', True, 'import numpy as np\n'), ((38, 56, 38, 71), 'numpy.zeros', 'np.zeros', ({(38, 65, 38, 70): 'w_dim'}, {}), '(w_dim)', True, 'import numpy as np\n'), ((38, 84, 38, 98), 'numpy.ones', 'np.ones', ({(38, 92, 38, 97): 'w_dim'}, {}), '(w_dim)', True, 'import numpy as np\n'), ((92, 9, 92, 37), 'numpy.random.randn', 'np.random.randn', ({(92, 25, 92, 33): 'num_data', (92, 35, 92, 36): '3'}, {}), '(num_data, 3)', True, 'import numpy as np\n'), ((92, 39, 92, 67), 'numpy.random.randn', 'np.random.randn', ({(92, 55, 92, 63): 'num_data', (92, 65, 92, 66): '2'}, {}), '(num_data, 2)', True, 'import numpy as np\n'), ((120, 12, 120, 41), 'numpy.random.randn', 'np.random.randn', ({(120, 28, 120, 40): '*prior_shape'}, {}), '(*prior_shape)', True, 'import numpy as np\n'), ((124, 12, 124, 46), 'numpy.random.randn', 'np.random.randn', ({(124, 28, 124, 45): '*posteriors_shape'}, {}), '(*posteriors_shape)', True, 'import numpy as np\n'), ((162, 12, 162, 41), 'numpy.random.randn', 'np.random.randn', ({(162, 28, 162, 40): '*prior_shape'}, {}), '(*prior_shape)', True, 'import numpy as np\n'), ((166, 12, 166, 46), 'numpy.random.randn', 'np.random.randn', ({(166, 28, 166, 45): '*posteriors_shape'}, {}), '(*posteriors_shape)', True, 'import numpy as np\n'), ((121, 19, 121, 48), 'numpy.random.randn', 'np.random.randn', ({(121, 35, 121, 47): '*prior_shape'}, {}), '(*prior_shape)', True, 'import numpy as np\n'), ((125, 19, 125, 53), 'numpy.random.randn', 'np.random.randn', ({(125, 35, 125, 52): '*posteriors_shape'}, {}), '(*posteriors_shape)', True, 'import numpy as np\n'), ((163, 19, 163, 48), 'numpy.random.randn', 'np.random.randn', ({(163, 35, 163, 47): '*prior_shape'}, {}), '(*prior_shape)', True, 'import numpy as np\n'), ((167, 19, 167, 53), 'numpy.random.randn', 'np.random.randn', ({(167, 35, 167, 52): '*posteriors_shape'}, {}), '(*posteriors_shape)', True, 'import numpy as np\n')]
actingweb/box-actingweb
aw-actor-trust.py
f586458484649aba927cd78c60b4d0fec7b82ca6
#!/usr/bin/env python
#
from actingweb import actor
from actingweb import config
from actingweb import trust
from actingweb import auth
import webapp2
import os
from google.appengine.ext.webapp import template
import json
import logging
import datetime
import time

# /trust handlers
#
# GET /trust with query parameters (relationship, type, and peerid) to retrieve trust relationships (auth: only creator and admins allowed)
# POST /trust with json body to initiate a trust relationship between this
# actor and another (reciprocal relationship) (auth: only creator and admins allowed)
# POST /trust/{relationship} with json body to create new trust
# relationship (see config.py for default relationship and auto-accept, no
# auth required)
# GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth: creator, admin, or peer secret)
# POST /trust/{relationship}}/{actorid} to send information to a peer about changes in the relationship
# PUT /trust/{relationship}}/{actorid} with a json body to change details on a relationship (baseuri, secret, desc) (auth: creator,
# admin, or peer secret)
# DELETE /trust/{relationship}}/{actorid} to delete a relationship (with
# ?peer=true if the delete is from the peer) (auth: creator, admin, or
# peer secret)


# Handling requests to trust/
class rootHandler(webapp2.RequestHandler):

    def get(self, id):
        if self.request.get('_method') == 'POST':
            self.post(id)
            return
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='GET'):
            self.response.set_status(403)
            return
        relationship = ''
        type = ''
        peerid = ''
        relationship = self.request.get('relationship')
        type = self.request.get('type')
        peerid = self.request.get('peerid')

        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid, type=type)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        pairs = []
        for rel in relationships:
            pairs.append({
                'baseuri': rel.baseuri,
                'id': myself.id,
                'peerid': rel.peerid,
                'relationship': rel.relationship,
                'approved': rel.approved,
                'peer_approved': rel.peer_approved,
                'verified': rel.verified,
                'type': rel.type,
                'desc': rel.desc,
                'secret': rel.secret,
            })
        out = json.dumps(pairs)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(200, 'Ok')

    def post(self, id):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        secret = ''
        desc = ''
        relationship = Config.default_relationship
        type = ''
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'url' in params:
                url = params['url']
            else:
                url = ''
            if 'relationship' in params:
                relationship = params['relationship']
            if 'type' in params:
                type = params['type']
            if 'desc' in params:
                desc = params['desc']
        except ValueError:
            url = self.request.get('url')
            relationship = self.request.get('relationship')
            type = self.request.get('type')
        if len(url) == 0:
            self.response.set_status(400, 'Missing peer URL')
            return
        secret = Config.newToken()
        new_trust = myself.createReciprocalTrust(
            url=url, secret=secret, desc=desc, relationship=relationship, type=type)
        if not new_trust:
            self.response.set_status(408, 'Unable to create trust relationship')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + '/' + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(201, 'Created')


# Handling requests to /trust/*, e.g. /trust/friend
class relationshipHandler(webapp2.RequestHandler):

    def get(self, id, relationship):
        if self.request.get('_method') == 'POST':
            self.post(id, relationship)
            return
        self.response.set_status(404, "Not found")

    def put(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship,
                                                      add_response=False)
        if not myself:
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'trustee_root' in params:
                trustee_root = params['trustee_root']
            else:
                trustee_root = ''
            if 'creator' in params:
                creator = params['creator']
            else:
                creator = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(trustee_root) > 0:
            myself.setProperty('trustee_root', trustee_root)
        if creator:
            myself.modify(creator=creator)
        self.response.set_status(204, 'No content')

    def delete(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship,
                                                      add_response=False)
        if not myself:
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='DELETE'):
            self.response.set_status(403)
            return
        myself.deleteProperty('trustee_root')
        self.response.set_status(204, 'No content')

    def post(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship,
                                                      add_response=False)
        if not myself:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'id' in params:
                peerid = params['id']
            else:
                peerid = ''
            if 'type' in params:
                type = params['type']
            else:
                type = ''
            if 'secret' in params:
                secret = params['secret']
            else:
                secret = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'verify' in params:
                verificationToken = params['verify']
            else:
                verificationToken = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0:
            self.response.set_status(400, 'Missing mandatory attributes')
            return
        if Config.auto_accept_default_relationship and Config.default_relationship == relationship:
            approved = True
        else:
            approved = False
        # Since we received a request for a relationship, assume that peer has approved
        new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid,
                                               approved=approved, secret=secret,
                                               verificationToken=verificationToken,
                                               type=type, peer_approved=True,
                                               relationship=relationship, desc=desc)
        if not new_trust:
            self.response.set_status(403, 'Forbidden')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + "/" + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if approved:
            self.response.set_status(201, 'Created')
        else:
            self.response.set_status(202, 'Accepted')


# Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd
class trustHandler(webapp2.RequestHandler):

    def get(self, id, relationship, peerid):
        if self.request.get('_method') == 'PUT':
            self.put(id, relationship, peerid)
            return
        if self.request.get('_method') == 'DELETE':
            self.delete(id, relationship, peerid)
            return
        logging.debug('GET trust headers: ' + str(self.request.headers))
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid):
            self.response.set_status(403)
            return
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        my_trust = relationships[0]
        # If the peer did a GET to verify
        if check.trust and check.trust.peerid == peerid and not my_trust.verified:
            my_trust.modify(verified=True)
            verificationToken = my_trust.verificationToken
        else:
            verificationToken = ''
        pair = {
            'baseuri': my_trust.baseuri,
            'id': myself.id,
            'peerid': my_trust.peerid,
            'relationship': my_trust.relationship,
            'approved': my_trust.approved,
            'peer_approved': my_trust.peer_approved,
            'verified': my_trust.verified,
            'verificationToken': verificationToken,
            'type': my_trust.type,
            'desc': my_trust.desc,
            'secret': my_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if my_trust.approved:
            self.response.set_status(200, 'Ok')
        else:
            self.response.set_status(202, 'Accepted')

    def post(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            peer_approved = None
            if 'approved' in params:
                if params['approved'] and params['approved'] == True:
                    peer_approved = True
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def put(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'approved' in params:
                if params['approved'] == True or params['approved'].lower() == "true":
                    approved = True
            else:
                approved = None
        except ValueError:
            if not self.request.get('_method') or self.request.get('_method') != "PUT":
                self.response.set_status(400, 'No json content')
                return
            if self.request.get('approved') and len(self.request.get('approved')) > 0:
                if self.request.get('approved').lower() == "true":
                    approved = True
            else:
                approved = None
            if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0:
                baseuri = self.request.get('baseuri')
            else:
                baseuri = ''
            if self.request.get('desc') and len(self.request.get('desc')) > 0:
                desc = self.request.get('desc')
            else:
                desc = ''
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def delete(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust',
                                                      subpath=relationship,
                                                      add_response=False)
        if not myself or (check.response["code"] != 200 and check.response["code"] != 401):
            auth.add_auth_response(appreq=self, auth_obj=check)
            return
        # We allow non-approved peers to delete even if we haven't approved the relationship yet
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False):
            self.response.set_status(403)
            return
        isPeer = False
        if check.trust and check.trust.peerid == peerid:
            isPeer = True
        else:
            # Use of GET param peer=true is a way of forcing no deletion of a peer
            # relationship even when requestor is not a peer (primarily for testing purposes)
            peerGet = self.request.get('peer').lower()
            if peerGet.lower() == "true":
                isPeer = True
        Config = config.config()
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        my_trust = relationships[0]
        if isPeer:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)
        else:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)
        if not deleted:
            self.response.set_status(502, 'Not able to delete relationship with peer.')
            return
        self.response.set_status(204, 'Ok')


application = webapp2.WSGIApplication([
    webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'),
    webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler,
                  name='relationshipHandler'),
    webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'),
], debug=True)
[((41, 34, 42, 74), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((74, 14, 74, 31), 'json.dumps', 'json.dumps', ({(74, 25, 74, 30): 'pairs'}, {}), '(pairs)', False, 'import json\n'), ((80, 34, 81, 74), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((131, 14, 131, 30), 'json.dumps', 'json.dumps', ({(131, 25, 131, 29): 'pair'}, {}), '(pair)', False, 'import json\n'), ((147, 34, 148, 116), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((178, 34, 181, 73), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((195, 34, 198, 73), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((261, 14, 261, 30), 'json.dumps', 'json.dumps', ({(261, 25, 261, 29): 'pair'}, {}), '(pair)', False, 'import json\n'), ((281, 34, 282, 96), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((313, 14, 313, 30), 'json.dumps', 'json.dumps', ({(313, 25, 313, 29): 'pair'}, {}), '(pair)', False, 'import json\n'), ((322, 34, 323, 96), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((344, 34, 345, 96), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((389, 34, 390, 116), 'actingweb.auth.init_actingweb', 'auth.init_actingweb', (), '', False, 'from actingweb import auth\n'), ((407, 17, 407, 32), 'actingweb.config.config', 'config.config', ({}, {}), '()', False, 'from actingweb import config\n'), ((425, 4, 425, 71), 'webapp2.Route', 'webapp2.Route', (), '', False, 'import webapp2\n'), ((426, 4, 427, 66), 'webapp2.Route', 'webapp2.Route', (), '', False, 'import webapp2\n'), ((428, 4, 428, 97), 'webapp2.Route', 'webapp2.Route', (), '', False, 'import webapp2\n'), ((392, 12, 392, 63), 'actingweb.auth.add_auth_response', 'auth.add_auth_response', (), '', False, 'from actingweb import auth\n')]
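The comment block at the top of the handler file above documents the /trust REST flow. As a minimal client-side sketch (not part of the original file), the snippet below shows how a creator might ask this actor to initiate a reciprocal trust relationship; the actor URL, the credentials, and the use of the third-party requests library are illustrative assumptions.

import requests

# Hypothetical values; nothing below is defined in the handler file itself.
actor_root = "https://actingweb.example.com/myactorid"
creator_auth = ("creator", "passphrase")  # creator/admin credentials assumed

# POST /trust with a JSON body asks the actor to initiate a reciprocal
# relationship with the peer actor given in 'url'.
resp = requests.post(
    actor_root + "/trust",
    json={
        "url": "https://peer.example.com/peeractorid",
        "relationship": "friend",
        "desc": "example relationship",
    },
    auth=creator_auth,
)
print(resp.status_code)               # 201 when the relationship was created
print(resp.headers.get("Location"))   # points at /trust/{relationship}/{peerid}
print(resp.json().get("secret"))      # shared secret echoed in the JSON body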
IamEld3st/RLBot
src/main/python/rlbot/version.py
36195ffd3a836ed910ce63aed8ba103b98b7b361
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module module
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
__version__ = '1.6.1'

release_notes = {
    '1.6.1': """
    Fixed GUI crash when loading certain RLBot config files with relative paths for agents.
    Fixed agent preset loading to allow multiple agents to saved/loaded correctly if they have the same name. - ima9rd
    """,
    '1.6.0':"""
    Add support for auto starting .NET executables.
    """,
    '1.5.1': """
    Fixed crash with GUI when no default RLBot.cfg file was found.
    Updated GUI to launch Rocket League when clicking run if no Rocket League process is found. - ima9rd
    """,
    '1.5.0': """
    Adding a have_internet helper function to help streamline upgrade checks. - ima9rd
    """,
    '1.4.2': """
    Adding support for auto-running java bots during tournaments. To take advantage of this in your bot,
    see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java

    Plus bug fixes:
    - Fixed a bug where auto-run executables would crash when trying to write to stderr.
    - Dragging bots to another team in the GUI no longer breaks the config.
    """,
    '1.3.0': """
    Accurate ball prediction for Hoops and Dropshot modes!
    - Kipje13, Marvin, NeverCast, et. al.
    """,
    '1.2.6': """
    Fixed a bug where field info was not extracted properly during dropshot mode.
    It was reporting 2 goals rather than the expected 140.
    """,
    '1.2.5': """
    ***************************************************
    *  Fix for dodge cancels / half flips! - ccman32  *
    ***************************************************

    Plus:
    - Changing the rendering strategy for 3D lines that go past the camera. Formerly it was
    "draw it, even though it's crazy sometimes", now it will be "don't draw it".
    - Showing the rate that inputs are received for each player index when you press the
    [home] key. Toggle back off with the [end] key.
    - Fixed a bug where party_member_bot could get influenced by real controller input.
    - Creating new presets in the GUI works better now.
    - Got rid of the libpng warning seen when using the GUI.
    - Giving specific error messages when cfg files are messed up.
    """,
    '1.2.2': """
    - Rearranged the GUI a bit, and made it load and track appearance configs more effectively.
    - Fixed bug where RUN button behavior in the GUI would not work after killing bots.
    """,
    '1.2.0': """
    - We now offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of
    physics data which updates at 120Hz and is not subject to interpolation. You can still make a
    great bot without it, but this feature is quite nice for the scientists among us.
    See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details!
    - Faster way to access ball prediction data in python. - Skyborg
    """,
    '1.1.3': """
    - Faster way to access ball prediction data in python. - Skyborg
    - Java bots will now shut down when the python framework quits. This has been necessary recently
    to avoid buggy situations.
    - Shutting down the python framework will no longer attempt to kill bots twice in a row.
    - Clicking on the "Run" button twice in a row in the GUI will no longer spawn duplicate processes.
    """,
    '1.1.2': """
    Faster way to access ball prediction data in python. - Skyborg
    """,
    '1.1.1': """
    You can now get information about the ball's status in Dropshot mode thanks to hallo_doei!
    Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot

    Other changes:
    - The loadout config for orange team is now respected again. - ccman32
    - Fixed a bug where the GUI would crash with a "KeyError". - hallo_doei
    - Avoiding and suppressing some game crashes, and also restoring the ability to get game tick
    data during replays and the postgame. - tarehart
    - Fixed a bug where bots would dodge when they intended to double jump. -tarehart
    """,
    '1.0.6': """
    The latest Rocket League patch broke dodges for our bots; this update fixes it.
    """,
    '1.0.5': """
    Maximum size for a render message has been decreased again because many people experienced
    errors related to memory access. The limit is now only double the original.
    """,
    '1.0.4': """
    - Maximum size for a render message has been increased by a factor of 100. This means you can
    draw a lot of lines at once without getting errors.
    - Boost amount for cars will now round up to the nearest integer, so 0.3% boost will now appear
    as 1 instead of 0.
    - Fixed a crash that would commonly happen after a match ends. As a side effect, you can no longer
    see up-to-date player data during instant replays.
    """,
    '1.0.3': """
    Time for the big 1.0 release! We actually left "beta" a long time ago so this isn't as big
    a milestone as the number implies, but we DO have two great new features!

    1. Setting game state. You can manipulate the position, velocity, etc of the ball and the cars!
    This can be a great help during bot development, and you can also get creative with it. Visit
    the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State
    Code written by hallo_doei, ccman32, and tarehart

    2. Ball prediction. We now provide a list of future ball positions based on chip's excellent
    physics modeling. Take advantage of this to do next-level wall reads, catches, and dribbles! You can
    read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/
    Note: currently the wall bounces are only accurate on the standard arena, not hoops or dropshot.
    Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction
    Code written by chip and tarehart

    Bonus:
    - You can now play on Salty Shores thanks to hallo_doei
    - Bug fix for people with spaces in their file path by Zaptive
    - Subprocess agent for future Rust support by whatisaphone
    """,
    '0.0.32': """
    More comprehensive fix for Rocket League patch 1.50. Compared to previous version:
    - Dropshot tile data is fixed
    - Boost pad data is fixed
    - Loadout configuration is fixed

    Thanks to ccman32 and dtracers for delivering this fix quickly!
    """,
    '0.0.31': """
    Rapid response to Rocket League patch 1.50 with the following known issues:
    - Dropshot tile data is missing
    - Boost pad data is missing
    - Loadout configuration is broken

    Thanks to ccman32 and dtracers for delivering this short-term fix quickly.

    We will follow this up with a proper fix as soon as possible. You may also choose to stay on
    Rocket League 1.49 and RLBot 0.0.30, ask for instructions on discord.
    """,
    '0.0.30': """
    - New core dll that is less likely to break when Rocket League is patched - ccman32 and hallo-doei
    - Fixed bug resulting in incorrect quickchat - dtracers
    - Added more built-in colors to the python rendering manager - Eastvillage
    - Fix for items with a ':' not showing up in the GUI - hallo-doei
    - Fix for GUI not saving correct path - hallo-doei
    - Fix for GUI crash when saving preset then canceling - hallo-doei
    - Adding file checking before injection (Resolves #167) - Redox
    - Fixed typo in rlbot.cfg - Redox
    - Fancy release notes - tarehart and Skyborg
    """
}

release_banner = """

              ______ _     ______       _
     10100   | ___ \\ |    | ___ \\     | |      00101
    110011   | |_/ / |    | |_/ / ___ | |_    110011
  00110110   |    /| |    | ___ \\/ _ \\| __|  01101100
    010010   | |\\ \\| |____| |_/ / (_) | |_    010010
     10010   \\_| \\_\\_____/\\____/ \\___/ \\__|    01001

"""


def get_current_release_notes():
    if __version__ in release_notes:
        return release_notes[__version__]
    return ''


def get_help_text():
    return "Trouble? Ask on Discord at https://discord.gg/5cNbXgG " \
           "or report an issue at https://github.com/RLBot/RLBot/issues"


def print_current_release_notes():
    print(release_banner)
    print("Version {}".format(__version__))
    print(get_current_release_notes())
    print(get_help_text())
    print("")
[]
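As a small illustrative sketch (not part of the module above), this is one way a caller might use the version helpers; the import path follows the repo layout shown, and the surrounding function is hypothetical.

from rlbot import version  # assumes src/main/python is on the import path

def show_startup_info():
    # Same data that print_current_release_notes() prints, but assembled by
    # the caller so it can decide what to show.
    print(version.release_banner)
    print("Version {}".format(version.__version__))
    notes = version.get_current_release_notes()
    if notes:
        print(notes)
    print(version.get_help_text())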
muffin-rice/pad-cogs
dungeoncog/enemy_skills_pb2.py
820ecf08f9569a3d7cf3264d0eb9567264b42edf
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: enemy_skills.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='enemy_skills.proto', package='dadguide_proto', syntax='proto3', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x12\x65nemy_skills.proto\x12\x0e\x64\x61\x64guide_proto\"\xbf\x02\n\x1cMonsterBehaviorWithOverrides\x12\x12\n\nmonster_id\x18\x01 \x01(\x05\x12-\n\x06levels\x18\x02 \x03(\x0b\x32\x1d.dadguide_proto.LevelBehavior\x12\x36\n\x0flevel_overrides\x18\x03 \x03(\x0b\x32\x1d.dadguide_proto.LevelBehavior\x12\x43\n\x06status\x18\x04 \x01(\x0e\x32\x33.dadguide_proto.MonsterBehaviorWithOverrides.Status\"_\n\x06Status\x12\x10\n\x0cNOT_APPROVED\x10\x00\x12\x12\n\x0e\x41PPROVED_AS_IS\x10\x01\x12\x14\n\x10NEEDS_REAPPROVAL\x10\x02\x12\x19\n\x15\x41PPROVED_WITH_CHANGES\x10\x03\"f\n\x0fMonsterBehavior\x12\x12\n\nmonster_id\x18\x01 \x01(\x05\x12-\n\x06levels\x18\x02 \x03(\x0b\x32\x1d.dadguide_proto.LevelBehavior\x12\x10\n\x08\x61pproved\x18\x03 \x01(\x08\"M\n\rLevelBehavior\x12\r\n\x05level\x18\x01 \x01(\x05\x12-\n\x06groups\x18\x02 \x03(\x0b\x32\x1d.dadguide_proto.BehaviorGroup\"\xd9\x02\n\rBehaviorGroup\x12;\n\ngroup_type\x18\x01 \x01(\x0e\x32\'.dadguide_proto.BehaviorGroup.GroupType\x12,\n\tcondition\x18\x02 \x01(\x0b\x32\x19.dadguide_proto.Condition\x12.\n\x08\x63hildren\x18\x03 \x03(\x0b\x32\x1c.dadguide_proto.BehaviorItem\"\xac\x01\n\tGroupType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07PASSIVE\x10\x01\x12\x0b\n\x07PREEMPT\x10\x02\x12\x11\n\rDISPEL_PLAYER\x10\x03\x12\x12\n\x0eMONSTER_STATUS\x10\x04\x12\r\n\tREMAINING\x10\x05\x12\x0c\n\x08STANDARD\x10\x06\x12\t\n\x05\x44\x45\x41TH\x10\x07\x12\x0f\n\x0bUNKNOWN_USE\x10\x08\x12\x14\n\x10HIGHEST_PRIORITY\x10\t\"u\n\x0c\x42\x65haviorItem\x12.\n\x05group\x18\x02 \x01(\x0b\x32\x1d.dadguide_proto.BehaviorGroupH\x00\x12,\n\x08\x62\x65havior\x18\x03 \x01(\x0b\x32\x18.dadguide_proto.BehaviorH\x00\x42\x07\n\x05value\"c\n\x08\x42\x65havior\x12,\n\tcondition\x18\x01 \x01(\x0b\x32\x19.dadguide_proto.Condition\x12\x16\n\x0e\x65nemy_skill_id\x18\x02 \x01(\x05\x12\x11\n\tchild_ids\x18\x03 \x03(\x05\"\x80\x04\n\tCondition\x12\x14\n\x0chp_threshold\x18\x01 \x01(\x05\x12\x12\n\nuse_chance\x18\x02 \x01(\x05\x12\x15\n\rrepeats_every\x18\x03 \x01(\x05\x12\x17\n\x0fglobal_one_time\x18\x04 \x01(\x08\x12\x19\n\x11limited_execution\x18\r \x01(\x05\x12!\n\x19trigger_enemies_remaining\x18\x05 \x01(\x05\x12\x13\n\x0bif_defeated\x18\x06 \x01(\x08\x12\x1f\n\x17if_attributes_available\x18\x07 \x01(\x08\x12\x18\n\x10trigger_monsters\x18\x08 \x03(\x05\x12\x16\n\x0etrigger_combos\x18\t \x01(\x05\x12\x1a\n\x12if_nothing_matched\x18\n \x01(\x08\x12\x14\n\x0ctrigger_turn\x18\x0b \x01(\x05\x12\x18\n\x10trigger_turn_end\x18\x0c \x01(\x05\x12\x1c\n\x14\x61lways_trigger_above\x18\x0e \x01(\x05\x12\x14\n\x0c\x61lways_after\x18\x0f \x01(\x05\x12\x11\n\tskill_set\x18\x10 \x01(\x05\x12\x19\n\x11\x65rased_attributes\x18\x11 \x03(\x05\x12\x13\n\x0b\x64\x61mage_done\x18\x12 \x01(\x05\x12\x1b\n\x13\x61ttributes_attacked\x18\x13 \x03(\x05\x12\x13\n\x0bskills_used\x18\x14 \x01(\x05\x62\x06proto3' ) _MONSTERBEHAVIORWITHOVERRIDES_STATUS = _descriptor.EnumDescriptor( 
name='Status', full_name='dadguide_proto.MonsterBehaviorWithOverrides.Status', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='NOT_APPROVED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='APPROVED_AS_IS', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NEEDS_REAPPROVAL', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='APPROVED_WITH_CHANGES', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=263, serialized_end=358, ) _sym_db.RegisterEnumDescriptor(_MONSTERBEHAVIORWITHOVERRIDES_STATUS) _BEHAVIORGROUP_GROUPTYPE = _descriptor.EnumDescriptor( name='GroupType', full_name='dadguide_proto.BehaviorGroup.GroupType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='UNSPECIFIED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PASSIVE', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PREEMPT', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DISPEL_PLAYER', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MONSTER_STATUS', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='REMAINING', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='STANDARD', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DEATH', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UNKNOWN_USE', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='HIGHEST_PRIORITY', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=717, serialized_end=889, ) _sym_db.RegisterEnumDescriptor(_BEHAVIORGROUP_GROUPTYPE) _MONSTERBEHAVIORWITHOVERRIDES = _descriptor.Descriptor( name='MonsterBehaviorWithOverrides', full_name='dadguide_proto.MonsterBehaviorWithOverrides', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='monster_id', full_name='dadguide_proto.MonsterBehaviorWithOverrides.monster_id', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='levels', 
full_name='dadguide_proto.MonsterBehaviorWithOverrides.levels', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='level_overrides', full_name='dadguide_proto.MonsterBehaviorWithOverrides.level_overrides', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='status', full_name='dadguide_proto.MonsterBehaviorWithOverrides.status', index=3, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _MONSTERBEHAVIORWITHOVERRIDES_STATUS, ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=39, serialized_end=358, ) _MONSTERBEHAVIOR = _descriptor.Descriptor( name='MonsterBehavior', full_name='dadguide_proto.MonsterBehavior', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='monster_id', full_name='dadguide_proto.MonsterBehavior.monster_id', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='levels', full_name='dadguide_proto.MonsterBehavior.levels', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='approved', full_name='dadguide_proto.MonsterBehavior.approved', index=2, number=3, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=360, serialized_end=462, ) _LEVELBEHAVIOR = _descriptor.Descriptor( name='LevelBehavior', full_name='dadguide_proto.LevelBehavior', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='level', full_name='dadguide_proto.LevelBehavior.level', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='groups', 
full_name='dadguide_proto.LevelBehavior.groups', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=464, serialized_end=541, ) _BEHAVIORGROUP = _descriptor.Descriptor( name='BehaviorGroup', full_name='dadguide_proto.BehaviorGroup', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='group_type', full_name='dadguide_proto.BehaviorGroup.group_type', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='condition', full_name='dadguide_proto.BehaviorGroup.condition', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='children', full_name='dadguide_proto.BehaviorGroup.children', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _BEHAVIORGROUP_GROUPTYPE, ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=544, serialized_end=889, ) _BEHAVIORITEM = _descriptor.Descriptor( name='BehaviorItem', full_name='dadguide_proto.BehaviorItem', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='group', full_name='dadguide_proto.BehaviorItem.group', index=0, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='behavior', full_name='dadguide_proto.BehaviorItem.behavior', index=1, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='value', full_name='dadguide_proto.BehaviorItem.value', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=891, serialized_end=1008, ) _BEHAVIOR = _descriptor.Descriptor( name='Behavior', full_name='dadguide_proto.Behavior', filename=None, file=DESCRIPTOR, 
containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='condition', full_name='dadguide_proto.Behavior.condition', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='enemy_skill_id', full_name='dadguide_proto.Behavior.enemy_skill_id', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='child_ids', full_name='dadguide_proto.Behavior.child_ids', index=2, number=3, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1010, serialized_end=1109, ) _CONDITION = _descriptor.Descriptor( name='Condition', full_name='dadguide_proto.Condition', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='hp_threshold', full_name='dadguide_proto.Condition.hp_threshold', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='use_chance', full_name='dadguide_proto.Condition.use_chance', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='repeats_every', full_name='dadguide_proto.Condition.repeats_every', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='global_one_time', full_name='dadguide_proto.Condition.global_one_time', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='limited_execution', full_name='dadguide_proto.Condition.limited_execution', index=4, number=13, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( 
name='trigger_enemies_remaining', full_name='dadguide_proto.Condition.trigger_enemies_remaining', index=5, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='if_defeated', full_name='dadguide_proto.Condition.if_defeated', index=6, number=6, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='if_attributes_available', full_name='dadguide_proto.Condition.if_attributes_available', index=7, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='trigger_monsters', full_name='dadguide_proto.Condition.trigger_monsters', index=8, number=8, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='trigger_combos', full_name='dadguide_proto.Condition.trigger_combos', index=9, number=9, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='if_nothing_matched', full_name='dadguide_proto.Condition.if_nothing_matched', index=10, number=10, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='trigger_turn', full_name='dadguide_proto.Condition.trigger_turn', index=11, number=11, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='trigger_turn_end', full_name='dadguide_proto.Condition.trigger_turn_end', index=12, number=12, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='always_trigger_above', full_name='dadguide_proto.Condition.always_trigger_above', index=13, number=14, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='always_after', 
full_name='dadguide_proto.Condition.always_after', index=14, number=15, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='skill_set', full_name='dadguide_proto.Condition.skill_set', index=15, number=16, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='erased_attributes', full_name='dadguide_proto.Condition.erased_attributes', index=16, number=17, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='damage_done', full_name='dadguide_proto.Condition.damage_done', index=17, number=18, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='attributes_attacked', full_name='dadguide_proto.Condition.attributes_attacked', index=18, number=19, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='skills_used', full_name='dadguide_proto.Condition.skills_used', index=19, number=20, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1112, serialized_end=1624, ) _MONSTERBEHAVIORWITHOVERRIDES.fields_by_name['levels'].message_type = _LEVELBEHAVIOR _MONSTERBEHAVIORWITHOVERRIDES.fields_by_name['level_overrides'].message_type = _LEVELBEHAVIOR _MONSTERBEHAVIORWITHOVERRIDES.fields_by_name['status'].enum_type = _MONSTERBEHAVIORWITHOVERRIDES_STATUS _MONSTERBEHAVIORWITHOVERRIDES_STATUS.containing_type = _MONSTERBEHAVIORWITHOVERRIDES _MONSTERBEHAVIOR.fields_by_name['levels'].message_type = _LEVELBEHAVIOR _LEVELBEHAVIOR.fields_by_name['groups'].message_type = _BEHAVIORGROUP _BEHAVIORGROUP.fields_by_name['group_type'].enum_type = _BEHAVIORGROUP_GROUPTYPE _BEHAVIORGROUP.fields_by_name['condition'].message_type = _CONDITION _BEHAVIORGROUP.fields_by_name['children'].message_type = _BEHAVIORITEM _BEHAVIORGROUP_GROUPTYPE.containing_type = _BEHAVIORGROUP _BEHAVIORITEM.fields_by_name['group'].message_type = _BEHAVIORGROUP _BEHAVIORITEM.fields_by_name['behavior'].message_type = _BEHAVIOR _BEHAVIORITEM.oneofs_by_name['value'].fields.append( _BEHAVIORITEM.fields_by_name['group']) _BEHAVIORITEM.fields_by_name['group'].containing_oneof = _BEHAVIORITEM.oneofs_by_name['value'] 
_BEHAVIORITEM.oneofs_by_name['value'].fields.append( _BEHAVIORITEM.fields_by_name['behavior']) _BEHAVIORITEM.fields_by_name['behavior'].containing_oneof = _BEHAVIORITEM.oneofs_by_name['value'] _BEHAVIOR.fields_by_name['condition'].message_type = _CONDITION DESCRIPTOR.message_types_by_name['MonsterBehaviorWithOverrides'] = _MONSTERBEHAVIORWITHOVERRIDES DESCRIPTOR.message_types_by_name['MonsterBehavior'] = _MONSTERBEHAVIOR DESCRIPTOR.message_types_by_name['LevelBehavior'] = _LEVELBEHAVIOR DESCRIPTOR.message_types_by_name['BehaviorGroup'] = _BEHAVIORGROUP DESCRIPTOR.message_types_by_name['BehaviorItem'] = _BEHAVIORITEM DESCRIPTOR.message_types_by_name['Behavior'] = _BEHAVIOR DESCRIPTOR.message_types_by_name['Condition'] = _CONDITION _sym_db.RegisterFileDescriptor(DESCRIPTOR) MonsterBehaviorWithOverrides = _reflection.GeneratedProtocolMessageType('MonsterBehaviorWithOverrides', (_message.Message,), { 'DESCRIPTOR': _MONSTERBEHAVIORWITHOVERRIDES, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.MonsterBehaviorWithOverrides) }) _sym_db.RegisterMessage(MonsterBehaviorWithOverrides) MonsterBehavior = _reflection.GeneratedProtocolMessageType('MonsterBehavior', (_message.Message,), { 'DESCRIPTOR': _MONSTERBEHAVIOR, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.MonsterBehavior) }) _sym_db.RegisterMessage(MonsterBehavior) LevelBehavior = _reflection.GeneratedProtocolMessageType('LevelBehavior', (_message.Message,), { 'DESCRIPTOR': _LEVELBEHAVIOR, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.LevelBehavior) }) _sym_db.RegisterMessage(LevelBehavior) BehaviorGroup = _reflection.GeneratedProtocolMessageType('BehaviorGroup', (_message.Message,), { 'DESCRIPTOR': _BEHAVIORGROUP, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.BehaviorGroup) }) _sym_db.RegisterMessage(BehaviorGroup) BehaviorItem = _reflection.GeneratedProtocolMessageType('BehaviorItem', (_message.Message,), { 'DESCRIPTOR': _BEHAVIORITEM, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.BehaviorItem) }) _sym_db.RegisterMessage(BehaviorItem) Behavior = _reflection.GeneratedProtocolMessageType('Behavior', (_message.Message,), { 'DESCRIPTOR': _BEHAVIOR, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.Behavior) }) _sym_db.RegisterMessage(Behavior) Condition = _reflection.GeneratedProtocolMessageType('Condition', (_message.Message,), { 'DESCRIPTOR': _CONDITION, '__module__': 'enemy_skills_pb2' # @@protoc_insertion_point(class_scope:dadguide_proto.Condition) }) _sym_db.RegisterMessage(Condition) # @@protoc_insertion_point(module_scope)
[((12, 10, 12, 36), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ({}, {}), '()', True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((14, 13, 21, 1), 'google.protobuf.descriptor.FileDescriptor', '_descriptor.FileDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((585, 31, 590, 74), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(585, 72, 585, 102): '"""MonsterBehaviorWithOverrides"""', (586, 72, 586, 91): '(_message.Message,)', (586, 93, 590, 73): "{'DESCRIPTOR': _MONSTERBEHAVIORWITHOVERRIDES, '__module__': 'enemy_skills_pb2'}"}, {}), "('MonsterBehaviorWithOverrides', (\n _message.Message,), {'DESCRIPTOR': _MONSTERBEHAVIORWITHOVERRIDES,\n '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((593, 18, 597, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(593, 59, 593, 76): '"""MonsterBehavior"""', (593, 78, 593, 97): '(_message.Message,)', (593, 99, 597, 1): "{'DESCRIPTOR': _MONSTERBEHAVIOR, '__module__': 'enemy_skills_pb2'}"}, {}), "('MonsterBehavior', (_message.\n Message,), {'DESCRIPTOR': _MONSTERBEHAVIOR, '__module__':\n 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((600, 16, 604, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(600, 57, 600, 72): '"""LevelBehavior"""', (600, 74, 600, 93): '(_message.Message,)', (600, 95, 604, 1): "{'DESCRIPTOR': _LEVELBEHAVIOR, '__module__': 'enemy_skills_pb2'}"}, {}), "('LevelBehavior', (_message.Message\n ,), {'DESCRIPTOR': _LEVELBEHAVIOR, '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((607, 16, 611, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(607, 57, 607, 72): '"""BehaviorGroup"""', (607, 74, 607, 93): '(_message.Message,)', (607, 95, 611, 1): "{'DESCRIPTOR': _BEHAVIORGROUP, '__module__': 'enemy_skills_pb2'}"}, {}), "('BehaviorGroup', (_message.Message\n ,), {'DESCRIPTOR': _BEHAVIORGROUP, '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((614, 15, 618, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(614, 56, 614, 70): '"""BehaviorItem"""', (614, 72, 614, 91): '(_message.Message,)', (614, 93, 618, 1): "{'DESCRIPTOR': _BEHAVIORITEM, '__module__': 'enemy_skills_pb2'}"}, {}), "('BehaviorItem', (_message.Message,\n ), {'DESCRIPTOR': _BEHAVIORITEM, '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((621, 11, 625, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(621, 52, 621, 62): '"""Behavior"""', (621, 64, 621, 83): '(_message.Message,)', (621, 85, 625, 1): "{'DESCRIPTOR': _BEHAVIOR, '__module__': 'enemy_skills_pb2'}"}, {}), "('Behavior', (_message.Message,), {\n 'DESCRIPTOR': _BEHAVIOR, '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((628, 12, 632, 2), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(628, 53, 628, 64): '"""Condition"""', (628, 66, 628, 85): '(_message.Message,)', (628, 87, 632, 1): "{'DESCRIPTOR': _CONDITION, '__module__': 
'enemy_skills_pb2'}"}, {}), "('Condition', (_message.Message,),\n {'DESCRIPTOR': _CONDITION, '__module__': 'enemy_skills_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((30, 8, 34, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((35, 8, 39, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((40, 8, 44, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((45, 8, 49, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((65, 8, 69, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((70, 8, 74, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((75, 8, 79, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((80, 8, 84, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((85, 8, 89, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((90, 8, 94, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((95, 8, 99, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((100, 8, 104, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((105, 8, 109, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((110, 8, 114, 56), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((131, 8, 137, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((138, 8, 144, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((145, 8, 151, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((152, 8, 158, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((184, 8, 190, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((191, 8, 197, 98), 
'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((198, 8, 204, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((229, 8, 235, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((236, 8, 242, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((267, 8, 273, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((274, 8, 280, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((281, 8, 287, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((313, 8, 319, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((320, 8, 326, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((338, 8, 342, 22), 'google.protobuf.descriptor.OneofDescriptor', '_descriptor.OneofDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((356, 8, 362, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((363, 8, 369, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((370, 8, 376, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((401, 8, 407, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((408, 8, 414, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((415, 8, 421, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((422, 8, 428, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((429, 8, 435, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((436, 8, 442, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((443, 8, 449, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((450, 8, 456, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as 
_descriptor\n'), ((457, 8, 463, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((464, 8, 470, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((471, 8, 477, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((478, 8, 484, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((485, 8, 491, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((492, 8, 498, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((499, 8, 505, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((506, 8, 512, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((513, 8, 519, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((520, 8, 526, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((527, 8, 533, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((534, 8, 540, 98), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n')]
dlegor/ClassyVision
classy_vision/heads/fully_connected_head.py
9c82d533b66b0a5fbb11f8ab3567a9c70aa4e013
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Any, Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head("fully_connected") class FullyConnectedHead(ClassyHead): """This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). """ def __init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool = False, ): """Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the head. Multiple instances of the same head might be attached to a model, and unique_id is used to refer to them. num_classes: Number of classes for the head. If None, then the fully connected layer is not applied. in_plane: Input size for the fully connected layer. """ super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> "FullyConnectedHead": """Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead instance. """ num_classes = config.get("num_classes", None) in_plane = config["in_plane"] return cls( config["unique_id"], num_classes, in_plane, zero_init_bias=config.get("zero_init_bias", False), ) def forward(self, x): # perform average pooling: out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc is not None: out = self.fc(out) return out
[((14, 1, 14, 33), 'classy_vision.heads.register_head', 'register_head', ({(14, 15, 14, 32): '"""fully_connected"""'}, {}), "('fully_connected')", False, 'from classy_vision.heads import ClassyHead, register_head\n'), ((42, 15, 42, 35), 'classy_vision.generic.util.is_pos_int', 'is_pos_int', ({(42, 26, 42, 34): 'in_plane'}, {}), '(in_plane)', False, 'from classy_vision.generic.util import is_pos_int\n'), ((43, 23, 43, 51), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', ({(43, 44, 43, 50): '(1, 1)'}, {}), '((1, 1))', True, 'import torch.nn as nn\n'), ((41, 38, 41, 61), 'classy_vision.generic.util.is_pos_int', 'is_pos_int', ({(41, 49, 41, 60): 'num_classes'}, {}), '(num_classes)', False, 'from classy_vision.generic.util import is_pos_int\n'), ((44, 51, 44, 83), 'torch.nn.Linear', 'nn.Linear', ({(44, 61, 44, 69): 'in_plane', (44, 71, 44, 82): 'num_classes'}, {}), '(in_plane, num_classes)', True, 'import torch.nn as nn\n')]
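The record above shows that FullyConnectedHead is normally built from a configuration dictionary via from_config and then applied to an (N, C, H, W) feature map. A minimal usage sketch follows; it assumes PyTorch and Classy Vision are installed, and the config values and tensor shapes are illustrative assumptions, not values taken from the repository.

import torch
from classy_vision.heads.fully_connected_head import FullyConnectedHead

# Config keys mirror those read by FullyConnectedHead.from_config(); the values are made up.
config = {
    "unique_id": "default_head",
    "num_classes": 1000,
    "in_plane": 2048,
    "zero_init_bias": False,
}
head = FullyConnectedHead.from_config(config)

# A fake feature map such as a convolutional trunk might produce: (N, C, H, W).
features = torch.randn(4, 2048, 7, 7)
logits = head(features)
print(logits.shape)  # expected: torch.Size([4, 1000])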
StanleyHou117/group66_LentTermProject
Task2C.py
0255310cb202f21cada8cf7c0f45a045a9b72c1f
from floodsystem.stationdata import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == "__main__": print("*** Task 2C: CUED Part IA Flood Warning System ***") run()
[((5, 15, 5, 35), 'floodsystem.stationdata.build_station_list', 'build_station_list', ({}, {}), '()', False, 'from floodsystem.stationdata import build_station_list\n'), ((6, 23, 6, 62), 'floodsystem.flood.stations_highest_rel_level', 'stations_highest_rel_level', ({(6, 50, 6, 58): 'stations', (6, 59, 6, 61): '10'}, {}), '(stations, 10)', False, 'from floodsystem.flood import stations_highest_rel_level\n')]
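In the script above, stations_highest_rel_level(stations, 10) is expected to return the ten stations with the highest relative water level as (station, level) pairs, which is what the loop prints. The self-contained sketch below illustrates that behaviour under that assumption; the Station class and the helper function are invented for illustration and are not the floodsystem implementation.

from typing import List, Tuple


class Station:
    """Stand-in for a monitoring station exposing a relative water level."""

    def __init__(self, name: str, relative_level: float):
        self.name = name
        self.relative_level = relative_level


def highest_rel_level_sketch(stations: List[Station], n: int) -> List[Tuple[Station, float]]:
    # Rank stations by relative level, highest first, and keep the top n.
    ranked = sorted(stations, key=lambda s: s.relative_level, reverse=True)
    return [(s, s.relative_level) for s in ranked[:n]]


stations = [Station("Cam", 1.8), Station("Ouse", 2.4), Station("Granta", 0.9)]
for station, level in highest_rel_level_sketch(stations, 2):
    print(station.name, level)  # Ouse 2.4, then Cam 1.8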
danijoo/biotite
src/biotite/copyable.py
22072e64676e4e917236eac8493eed4c6a22cc33
# This source code is part of the Biotite package and is distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst' for further # information. __name__ = "biotite" __author__ = "Patrick Kunzmann" __all__ = ["Copyable"] import abc class Copyable(metaclass=abc.ABCMeta): """ Base class for all objects, that should be copyable. The public method `copy()` first creates a fresh instance of the class of the instance, that is copied via the `__copy_create__()` method. All variables, that could not be set via the constructor, are then copied via `__copy_fill__()`, starting with the method in the uppermost base class and ending with the class of the instance to be copied. This approach solves the problem of encapsulated variables in superclasses. """ def copy(self): """ Create a deep copy of this object. Returns ------- copy A copy of this object. """ clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): """ Instantiate a new object of this class. Only the constructor should be called in this method. All further attributes, that need to be copied are handled in `__copy_fill__()` Do not call the `super()` method here. This method must be overridden, if the constructor takes parameters. Returns ------- copy A freshly instantiated copy of *self*. """ return type(self)() def __copy_fill__(self, clone): """ Copy all necessary attributes to the new object. Always call the `super()` method as first statement. Parameters ---------- clone The freshly instantiated copy of *self*. """ pass
[]
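To make the __copy_create__/__copy_fill__ split described in the docstrings above concrete, here is a small illustrative subclass, assuming the Copyable class above is importable; the class and attribute names are invented for demonstration and are not part of Biotite.

class LabeledBox(Copyable):
    """Toy copyable object: size is a constructor argument, label is not."""

    def __init__(self, size):
        self._size = size
        self._label = None  # can only be set after construction

    def set_label(self, label):
        self._label = label

    def __copy_create__(self):
        # The constructor takes a parameter, so this must be overridden.
        return LabeledBox(self._size)

    def __copy_fill__(self, clone):
        # Call the superclass method first, then copy the encapsulated state.
        super().__copy_fill__(clone)
        clone._label = self._label


box = LabeledBox(3)
box.set_label("original")
duplicate = box.copy()
assert duplicate._label == "original" and duplicate is not box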
np-hacs/ha-wyzeapi
custom_components/wyzeapi/binary_sensor.py
8abc6af59d36514008f696310b290a046d7c7a72
import logging import time from datetime import timedelta from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by Wyze" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug("""Creating new WyzeApi binary sensor component""") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device = device self._last_event = int(str(int(time.time())) + "000") @property def device_info(self): return { "identifiers": { (DOMAIN, self._device.mac) }, "name": self.name, "manufacturer": "WyzeLabs", "model": self._device.product_model } @property def available(self) -> bool: return self._available @property def name(self): """Return the display name of this switch.""" return self._device.nickname @property def is_on(self): """Return true if switch is on.""" return self._on @property def unique_id(self): return "{}-motion".format(self._device.mac) @property def device_state_attributes(self): """Return device attributes of the entity.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, "state": self.is_on, "available": self.available, "device model": self._device.product_model, "mac": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value == "1" else False latest_event = self._client.get_latest_event(self._device) if latest_event is not None: if latest_event.event_ts > self._last_event: self._on = True self._last_event = latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else: self._on = False
[((19, 10, 19, 37), 'logging.getLogger', 'logging.getLogger', ({(19, 28, 19, 36): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((21, 16, 21, 37), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import timedelta\n'), ((48, 39, 48, 50), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')]
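One detail of WyzeCameraMotion above worth spelling out: Wyze event timestamps are in milliseconds, and the constructor builds its initial value by appending "000" to a whole-second epoch string, which is equivalent to multiplying by 1000. A small illustration follows; it is not part of the integration, and the 5-second offset is a hypothetical event.

import time

now_s = int(time.time())
via_string = int(str(now_s) + "000")  # construction used in __init__ above
via_math = now_s * 1000               # arithmetic equivalent
assert via_string == via_math

# update() reports motion only when a strictly newer event timestamp arrives:
last_event = via_math
new_event = last_event + 5_000        # hypothetical event 5 seconds later
print(new_event > last_event)         # True -> the sensor turns on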
GEOS-ESM/AeroApps
src/Components/missions/GEMS/mcd43c.py
874dad6f34420c014d98eccbe81a061bdc0110cf
""" Reads climate modeling grid 0.05 degree MCD43 BRDF files. """ import os import sys from numpy import loadtxt, array, tile, where, concatenate, flipud from numpy import ones from datetime import date, datetime, timedelta from glob import glob from pyhdf.SD import SD, HDF4Error MISSING = 32.767 SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): """ This class implements the MODIS LAND BRDF 16-day Level 3 products, MCD43C1 (0.05 degree horz res), """ def __init__ (self,Path,lon,lat,Verb=1): """ Reads files for one day of Level 3 MCD43C1 present on a given *Path* and returns an object with all 3 kernels coeff. On input, Required parameters: Path -- for now a single file. Eventually implement a single directory, or a list of files and directories. 
""" if type(lon) is list: lon = array(lon) lat = array(lat) # List of HDF files for a given date #----------------------------------- self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if type(Path) is str: self.Files = [Path] else: self.Files = Path # From a list of lat and lon, return the # dx, dy on the grid # ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat): """Given a list of lat, lon, return numbers to find the position of the nearest neighbor on the grid (dx,dy) """ dLon = 0.05 dLat = 0.05 Lon0 = -180 - dLon Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): """Reads MCD43C1 file with Level 3 BRDF kernels for each MODIS band.""" # Create empty lists for SDS to be read from file # ----------------------------------------------- for name in self.SDS: self.__dict__[name] = [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb: print "[] Working on "+fn hfile = SD(fn) except HDF4Error: if self.verb > 2: print "- %s: not recognized as an HDF file"%filename return # Read select variables (reshape to allow concatenation later) # ------------------------------------------------------------ for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ if __name__ == "__main__": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat) ex = McD43C(path,lon.flatten(),lat.flatte())
[]
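As a worked illustration of the _findNearest index arithmetic in mcd43c.py above: the 0.05-degree climate modeling grid origin is offset by half a cell, so a longitude/latitude pair maps to column and row indices as shown below. The sample points and printed values are computed here for illustration and are not taken from the repository.

import numpy as np

dLon, dLat = 0.05, 0.05
Lon0 = -180 - dLon  # -180.05
Lat0 = -90 + dLat   # -89.95

lon = np.array([0.0, -2.0, 170.1])
lat = np.array([0.0, 88.0, -55.5])

dx = (0.5 + (lon - Lon0) / dLon).astype(int)
dy = (0.5 + (lat - Lat0) / dLat).astype(int)

print(dx)  # e.g. [3601 3561 7003]
print(dy)  # e.g. [1799 3559  689]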
kalyc/keras-apache-mxnet
tests/keras/layers/wrappers_test.py
5497ebd50a45ccc446b8944ebbe11fb7721a5533
import pytest import numpy as np import copy from numpy.testing import assert_allclose from keras.utils import CustomObjectScope from keras.layers import wrappers, Input, Layer from keras.layers import RNN from keras import layers from keras.models import Sequential, Model, model_from_json from keras import backend as K from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed(): # first, test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config() # test when specifying a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test 
with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance are 0 and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert that mean and variance changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test layers that need learning_phase to be set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for i in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', 
reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4)) for i in range(4): model_input[i, i:, :] = 0. model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1 def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps = 2 output_dim = 2 dropout_rate = 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim = 2 * output_dim if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) # test with Sequential model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test with functional API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic length rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps = 2 output_dim = 2 dropout_rate = 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) 
target_dim = 2 * output_dim if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2 dim = 5 timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func = lambda y, y_rev: y + y_rev elif merge_mode == 'mul': merge_func = lambda y, y_rev: y * y_rev elif merge_mode == 'ave': merge_func = lambda y, y_rev: (y + y_rev) / 2 elif merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda y, y_rev: [y, y_rev] # basic case inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected) + n_states * 2 for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test if the state of a BiRNN is the concatenation of the underlying RNNs y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged, y_forward + y_backward): assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim = 5 timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2 dim = 5 timesteps = 3 units = 3 input1 = Input((timesteps, dim)) layer = 
wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test passing invalid initial_state: passing a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state + h_const return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case. x = Input((5, 5)) c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state + h_const return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case. x = Input((5, 5)) c = Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for, s_bac, c]) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase to be set x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if __name__ == '__main__': pytest.main([__file__])
[((313, 1, 313, 77), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(313, 25, 313, 37): '"""merge_mode"""', (313, 39, 313, 76): "['sum', 'mul', 'ave', 'concat', None]"}, {}), "('merge_mode', ['sum', 'mul', 'ave', 'concat', None])", False, 'import pytest\n'), ((374, 1, 374, 63), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(374, 25, 374, 37): '"""merge_mode"""', (374, 39, 374, 62): "['sum', 'concat', None]"}, {}), "('merge_mode', ['sum', 'concat', None])", False, 'import pytest\n'), ((18, 12, 18, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((30, 17, 30, 44), 'numpy.random.random', 'np.random.random', ({(30, 34, 30, 43): '(1, 3, 4)'}, {}), '((1, 3, 4))', True, 'import numpy as np\n'), ((34, 16, 34, 28), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((42, 4, 42, 62), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((45, 12, 45, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((54, 17, 54, 69), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((58, 16, 58, 28), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((65, 4, 65, 62), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((68, 12, 68, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((81, 12, 81, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((91, 12, 91, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((93, 18, 93, 30), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((100, 8, 100, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((102, 18, 102, 29), 'keras.models.Model', 'Model', ({(102, 24, 102, 25): 'x', (102, 27, 102, 28): 'y'}, {}), '(x, y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((108, 12, 108, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((124, 10, 124, 39), 'keras.utils.generic_utils.object_list_uid', 'object_list_uid', ({(124, 26, 124, 38): 'model.inputs'}, {}), '(model.inputs)', False, 'from keras.utils.generic_utils import object_list_uid, to_list\n'), ((136, 4, 136, 24), 'numpy.random.seed', 'np.random.seed', ({(136, 19, 136, 23): '(1234)'}, {}), '(1234)', True, 'import numpy as np\n'), ((137, 8, 137, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((139, 12, 139, 23), 'keras.models.Model', 'Model', ({(139, 18, 139, 19): 'x', (139, 21, 139, 22): 'y'}, {}), '(x, y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((148, 8, 148, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((165, 12, 165, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', 
False, 'from keras.models import Sequential, Model, model_from_json\n'), ((173, 18, 173, 82), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((181, 11, 181, 55), 'keras.backend.function', 'K.function', ({(181, 22, 181, 35): '[model.input]', (181, 37, 181, 54): 'mask_outputs[:-1]'}, {}), '([model.input], mask_outputs[:-1])', True, 'from keras import backend as K\n'), ((185, 21, 185, 52), 'numpy.any', 'np.any', (), '', True, 'import numpy as np\n'), ((196, 12, 196, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((201, 18, 201, 67), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((210, 11, 210, 50), 'keras.backend.function', 'K.function', ({(210, 22, 210, 35): '[model.input]', (210, 37, 210, 49): 'mask_outputs'}, {}), '([model.input], mask_outputs)', True, 'from keras import backend as K\n'), ((217, 12, 217, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((227, 12, 227, 24), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((334, 13, 334, 36), 'keras.layers.Input', 'Input', ({(334, 19, 334, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((349, 13, 349, 36), 'keras.layers.Input', 'Input', ({(349, 19, 349, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((383, 13, 383, 36), 'keras.layers.Input', 'Input', ({(383, 19, 383, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((389, 13, 389, 36), 'keras.layers.Input', 'Input', ({(389, 19, 389, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((395, 12, 395, 34), 'keras.models.Model', 'Model', ({(395, 18, 395, 24): 'inputs', (395, 26, 395, 33): 'outputs'}, {}), '(inputs, outputs)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((410, 13, 410, 36), 'keras.layers.Input', 'Input', ({(410, 19, 410, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((416, 13, 416, 36), 'keras.layers.Input', 'Input', ({(416, 19, 416, 35): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((422, 12, 422, 43), 'keras.models.Model', 'Model', ({(422, 18, 422, 34): '[input1, input2]', (422, 36, 422, 42): 'output'}, {}), '([input1, input2], output)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((474, 8, 474, 21), 'keras.layers.Input', 'Input', ({(474, 14, 474, 20): '(5, 5)'}, {}), '((5, 5))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((475, 8, 475, 19), 'keras.layers.Input', 'Input', ({(475, 14, 475, 18): '(3,)'}, {}), '((3,))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((481, 12, 481, 28), 'keras.models.Model', 'Model', ({(481, 18, 481, 24): '[x, c]', (481, 26, 481, 27): 'y'}, {}), '([x, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((489, 11, 489, 38), 'numpy.random.random', 'np.random.random', ({(489, 28, 489, 37): '(6, 5, 5)'}, {}), '((6, 5, 5))', True, 'import numpy as np\n'), ((490, 11, 490, 35), 'numpy.random.random', 'np.random.random', 
({(490, 28, 490, 34): '(6, 3)'}, {}), '((6, 3))', True, 'import numpy as np\n'), ((497, 12, 497, 28), 'keras.models.Model', 'Model', ({(497, 18, 497, 24): '[x, c]', (497, 26, 497, 27): 'y'}, {}), '([x, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((500, 4, 500, 44), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((506, 12, 506, 28), 'keras.models.Model', 'Model', ({(506, 18, 506, 24): '[x, c]', (506, 26, 506, 27): 'y'}, {}), '([x, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((509, 4, 509, 44), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((556, 8, 556, 21), 'keras.layers.Input', 'Input', ({(556, 14, 556, 20): '(5, 5)'}, {}), '((5, 5))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((557, 8, 557, 19), 'keras.layers.Input', 'Input', ({(557, 14, 557, 18): '(3,)'}, {}), '((3,))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((558, 12, 558, 24), 'keras.layers.Input', 'Input', ({(558, 18, 558, 23): '(32,)'}, {}), '((32,))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((559, 12, 559, 24), 'keras.layers.Input', 'Input', ({(559, 18, 559, 23): '(32,)'}, {}), '((32,))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((565, 12, 565, 42), 'keras.models.Model', 'Model', ({(565, 18, 565, 38): '[x, s_for, s_bac, c]', (565, 40, 565, 41): 'y'}, {}), '([x, s_for, s_bac, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((574, 11, 574, 38), 'numpy.random.random', 'np.random.random', ({(574, 28, 574, 37): '(6, 5, 5)'}, {}), '((6, 5, 5))', True, 'import numpy as np\n'), ((575, 14, 575, 39), 'numpy.random.random', 'np.random.random', ({(575, 31, 575, 38): '(6, 32)'}, {}), '((6, 32))', True, 'import numpy as np\n'), ((576, 14, 576, 39), 'numpy.random.random', 'np.random.random', ({(576, 31, 576, 38): '(6, 32)'}, {}), '((6, 32))', True, 'import numpy as np\n'), ((577, 11, 577, 35), 'numpy.random.random', 'np.random.random', ({(577, 28, 577, 34): '(6, 3)'}, {}), '((6, 3))', True, 'import numpy as np\n'), ((584, 12, 584, 42), 'keras.models.Model', 'Model', ({(584, 18, 584, 38): '[x, s_for, s_bac, c]', (584, 40, 584, 41): 'y'}, {}), '([x, s_for, s_bac, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((587, 4, 587, 44), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((598, 12, 598, 42), 'keras.models.Model', 'Model', ({(598, 18, 598, 38): '[x, s_for, s_bac, c]', (598, 40, 598, 41): 'y'}, {}), '([x, s_for, s_bac, c], y)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((601, 4, 601, 44), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((606, 8, 606, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((617, 8, 617, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((632, 8, 632, 27), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((649, 4, 649, 27), 'pytest.main', 'pytest.main', ({(649, 16, 649, 26): '[__file__]'}, {}), '([__file__])', False, 'import pytest\n'), ((20, 14, 20, 39), 'keras.layers.Activation', 'layers.Activation', ({(20, 32, 20, 38): '"""relu"""'}, {}), 
"('relu')", False, 'from keras import layers\n'), ((22, 14, 22, 42), 'numpy.random.random', 'np.random.random', ({(22, 31, 22, 41): '(10, 3, 4)'}, {}), '((10, 3, 4))', True, 'import numpy as np\n'), ((22, 44, 22, 72), 'numpy.random.random', 'np.random.random', ({(22, 61, 22, 71): '(10, 3, 2)'}, {}), '((10, 3, 2))', True, 'import numpy as np\n'), ((37, 18, 37, 43), 'keras.layers.Activation', 'layers.Activation', ({(37, 36, 37, 42): '"""relu"""'}, {}), "('relu')", False, 'from keras import layers\n'), ((50, 14, 50, 66), 'numpy.random.randint', 'np.random.randint', (), '', True, 'import numpy as np\n'), ((51, 14, 51, 45), 'numpy.random.random', 'np.random.random', ({(51, 31, 51, 44): '(10, 3, 4, 6)'}, {}), '((10, 3, 4, 6))', True, 'import numpy as np\n'), ((72, 14, 72, 39), 'keras.layers.Activation', 'layers.Activation', ({(72, 32, 72, 38): '"""relu"""'}, {}), "('relu')", False, 'from keras import layers\n'), ((74, 25, 74, 58), 'numpy.random.random', 'np.random.random', ({(74, 42, 74, 57): '(1, 2, 4, 4, 3)'}, {}), '((1, 2, 4, 4, 3))', True, 'import numpy as np\n'), ((75, 25, 75, 58), 'numpy.random.random', 'np.random.random', ({(75, 42, 75, 57): '(1, 2, 4, 4, 5)'}, {}), '((1, 2, 4, 4, 5))', True, 'import numpy as np\n'), ((84, 14, 84, 39), 'keras.layers.Activation', 'layers.Activation', ({(84, 32, 84, 38): '"""relu"""'}, {}), "('relu')", False, 'from keras import layers\n'), ((87, 14, 87, 42), 'numpy.random.random', 'np.random.random', ({(87, 31, 87, 41): '(10, 3, 4)'}, {}), '((10, 3, 4))', True, 'import numpy as np\n'), ((87, 44, 87, 72), 'numpy.random.random', 'np.random.random', ({(87, 61, 87, 71): '(10, 3, 3)'}, {}), '((10, 3, 3))', True, 'import numpy as np\n'), ((92, 14, 92, 42), 'keras.layers.Dense', 'layers.Dense', (), '', False, 'from keras import layers\n'), ((94, 20, 94, 71), 'keras.layers.wrappers.TimeDistributed', 'wrappers.TimeDistributed', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((96, 20, 96, 48), 'numpy.random.random', 'np.random.random', ({(96, 37, 96, 47): '(10, 3, 2)'}, {}), '((10, 3, 2))', True, 'import numpy as np\n'), ((96, 50, 96, 78), 'numpy.random.random', 'np.random.random', ({(96, 67, 96, 77): '(10, 3, 3)'}, {}), '((10, 3, 3))', True, 'import numpy as np\n'), ((101, 8, 101, 39), 'keras.layers.wrappers.TimeDistributed', 'wrappers.TimeDistributed', ({(101, 33, 101, 38): 'model'}, {}), '(model)', False, 'from keras.layers import wrappers, Input, Layer\n'), ((104, 20, 104, 48), 'numpy.random.random', 'np.random.random', ({(104, 37, 104, 47): '(10, 3, 2)'}, {}), '((10, 3, 2))', True, 'import numpy as np\n'), ((104, 50, 104, 78), 'numpy.random.random', 'np.random.random', ({(104, 67, 104, 77): '(10, 3, 3)'}, {}), '((10, 3, 3))', True, 'import numpy as np\n'), ((115, 47, 115, 63), 'numpy.array', 'np.array', ({(115, 56, 115, 62): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((116, 47, 116, 63), 'numpy.array', 'np.array', ({(116, 56, 116, 62): '[1, 1]'}, {}), '([1, 1])', True, 'import numpy as np\n'), ((118, 25, 118, 74), 'numpy.random.normal', 'np.random.normal', (), '', True, 'import numpy as np\n'), ((127, 11, 127, 42), 'keras.backend.int_shape', 'K.int_shape', ({(127, 23, 127, 41): 'td._input_map[uid]'}, {}), '(td._input_map[uid])', True, 'from keras import backend as K\n'), ((14, 20, 14, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((140, 22, 140, 50), 'numpy.random.random', 'np.random.random', ({(140, 39, 140, 49): '(10, 3, 2)'}, {}), '((10, 3, 2))', True, 'import numpy 
as np\n'), ((141, 20, 141, 30), 'numpy.mean', 'np.mean', ({(141, 28, 141, 29): 'y'}, {}), '(y)', True, 'import numpy as np\n'), ((130, 20, 130, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((132, 21, 132, 32), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((149, 37, 149, 64), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', ({}, {}), '()', False, 'from keras import layers\n'), ((144, 20, 144, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((171, 14, 171, 57), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', (), '', False, 'from keras import layers\n'), ((177, 14, 177, 39), 'numpy.random.random', 'np.random.random', ({(177, 31, 177, 38): '(10, 1)'}, {}), '((10, 1))', True, 'import numpy as np\n'), ((188, 15, 188, 67), 'numpy.array_equal', 'np.array_equal', ({(188, 30, 188, 49): 'mask_outputs_val[i]', (188, 51, 188, 66): 'ref_mask_val[i]'}, {}), '(mask_outputs_val[i], ref_mask_val[i])', True, 'import numpy as np\n'), ((206, 14, 206, 42), 'numpy.random.random', 'np.random.random', ({(206, 31, 206, 41): '(10, 3, 5)'}, {}), '((10, 3, 5))', True, 'import numpy as np\n'), ((212, 47, 212, 75), 'numpy.any', 'np.any', (), '', True, 'import numpy as np\n'), ((213, 47, 213, 75), 'numpy.any', 'np.any', (), '', True, 'import numpy as np\n'), ((192, 20, 192, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((220, 14, 220, 39), 'keras.layers.Activation', 'layers.Activation', ({(220, 32, 220, 38): '"""relu"""'}, {}), "('relu')", False, 'from keras import layers\n'), ((230, 14, 230, 39), 'keras.layers.Activation', 'layers.Activation', ({(230, 32, 230, 38): '"""relu"""'}, {}), "('relu')", False, 'from keras import layers\n'), ((243, 12, 243, 55), 'numpy.random.random', 'np.random.random', ({(243, 29, 243, 54): '(samples, timesteps, dim)'}, {}), '((samples, timesteps, dim))', True, 'import numpy as np\n'), ((245, 12, 245, 51), 'numpy.random.random', 'np.random.random', ({(245, 29, 245, 50): '(samples, target_dim)'}, {}), '((samples, target_dim))', True, 'import numpy as np\n'), ((248, 16, 248, 28), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((262, 16, 262, 28), 'keras.models.Sequential', 'Sequential', ({}, {}), '()', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((272, 17, 272, 40), 'keras.layers.Input', 'Input', ({(272, 23, 272, 39): '(timesteps, dim)'}, {}), '((timesteps, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((276, 16, 276, 38), 'keras.models.Model', 'Model', ({(276, 22, 276, 28): 'inputs', (276, 30, 276, 37): 'outputs'}, {}), '(inputs, outputs)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((281, 17, 281, 55), 'keras.layers.Input', 'Input', (), '', False, 'from keras.layers import wrappers, Input, Layer\n'), ((284, 16, 284, 38), 'keras.models.Model', 'Model', ({(284, 22, 284, 28): 'inputs', (284, 30, 284, 37): 'outputs'}, {}), '(inputs, outputs)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((300, 12, 300, 55), 'numpy.random.random', 'np.random.random', ({(300, 29, 300, 54): '(samples, timesteps, dim)'}, {}), '((samples, timesteps, dim))', True, 'import numpy as np\n'), ((302, 12, 302, 51), 'numpy.random.random', 'np.random.random', ({(302, 29, 302, 50): '(samples, target_dim)'}, 
{}), '((samples, target_dim))', True, 'import numpy as np\n'), ((304, 17, 304, 35), 'keras.layers.Input', 'Input', ({(304, 23, 304, 34): '(None, dim)'}, {}), '((None, dim))', False, 'from keras.layers import wrappers, Input, Layer\n'), ((308, 16, 308, 38), 'keras.models.Model', 'Model', ({(308, 22, 308, 28): 'inputs', (308, 30, 308, 37): 'outputs'}, {}), '(inputs, outputs)', False, 'from keras.models import Sequential, Model, model_from_json\n'), ((289, 21, 289, 32), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((320, 9, 320, 48), 'numpy.random.rand', 'np.random.rand', ({(320, 24, 320, 31): 'samples', (320, 33, 320, 42): 'timesteps', (320, 44, 320, 47): 'dim'}, {}), '(samples, timesteps, dim)', True, 'import numpy as np\n'), ((346, 8, 346, 42), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((363, 8, 363, 42), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((370, 8, 370, 60), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((381, 9, 381, 48), 'numpy.random.rand', 'np.random.rand', ({(381, 24, 381, 31): 'samples', (381, 33, 381, 42): 'timesteps', (381, 44, 381, 47): 'dim'}, {}), '(samples, timesteps, dim)', True, 'import numpy as np\n'), ((400, 8, 400, 42), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((417, 9, 417, 34), 'pytest.raises', 'pytest.raises', ({(417, 23, 417, 33): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n'), ((425, 14, 425, 53), 'numpy.random.rand', 'np.random.rand', ({(425, 29, 425, 36): 'samples', (425, 38, 425, 47): 'timesteps', (425, 49, 425, 52): 'dim'}, {}), '(samples, timesteps, dim)', True, 'import numpy as np\n'), ((426, 14, 426, 53), 'numpy.random.rand', 'np.random.rand', ({(426, 29, 426, 36): 'samples', (426, 38, 426, 47): 'timesteps', (426, 49, 426, 52): 'dim'}, {}), '(samples, timesteps, dim)', True, 'import numpy as np\n'), ((478, 9, 478, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(478, 27, 478, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((485, 8, 485, 25), 'numpy.zeros', 'np.zeros', ({(485, 17, 485, 24): '(6, 64)'}, {}), '((6, 64))', True, 'import numpy as np\n'), ((494, 9, 494, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(494, 27, 494, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((503, 9, 503, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(503, 27, 503, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((430, 20, 430, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((562, 9, 562, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(562, 27, 562, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((570, 8, 570, 25), 'numpy.zeros', 'np.zeros', ({(570, 17, 570, 24): '(6, 64)'}, {}), '((6, 64))', True, 'import numpy as np\n'), ((581, 9, 581, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(581, 27, 581, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((591, 9, 591, 38), 'pytest.raises', 'pytest.raises', ({(591, 23, 591, 37): 
'AssertionError'}, {}), '(AssertionError)', False, 'import pytest\n'), ((592, 8, 592, 60), 'numpy.testing.assert_allclose', 'assert_allclose', (), '', False, 'from numpy.testing import assert_allclose\n'), ((595, 9, 595, 42), 'keras.utils.CustomObjectScope', 'CustomObjectScope', ({(595, 27, 595, 41): 'custom_objects'}, {}), '(custom_objects)', False, 'from keras.utils import CustomObjectScope\n'), ((512, 20, 512, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((607, 35, 607, 54), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', ({(607, 52, 607, 53): '3'}, {}), '(3)', False, 'from keras import layers\n'), ((618, 35, 618, 54), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', ({(618, 52, 618, 53): '3'}, {}), '(3)', False, 'from keras import layers\n'), ((634, 8, 634, 75), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', (), '', False, 'from keras import layers\n'), ((19, 39, 19, 54), 'keras.layers.Dense', 'layers.Dense', ({(19, 52, 19, 53): '(2)'}, {}), '(2)', False, 'from keras import layers\n'), ((35, 43, 35, 58), 'keras.layers.Dense', 'layers.Dense', ({(35, 56, 35, 57): '(2)'}, {}), '(2)', False, 'from keras import layers\n'), ((46, 39, 46, 61), 'keras.layers.Embedding', 'layers.Embedding', ({(46, 56, 46, 57): '(5)', (46, 59, 46, 60): '(6)'}, {}), '(5, 6)', False, 'from keras import layers\n'), ((59, 43, 59, 65), 'keras.layers.Embedding', 'layers.Embedding', ({(59, 60, 59, 61): '(5)', (59, 63, 59, 64): '(6)'}, {}), '(5, 6)', False, 'from keras import layers\n'), ((69, 39, 70, 68), 'keras.layers.Conv2D', 'layers.Conv2D', (), '', False, 'from keras import layers\n'), ((82, 39, 82, 54), 'keras.layers.Dense', 'layers.Dense', ({(82, 52, 82, 53): '(2)'}, {}), '(2)', False, 'from keras import layers\n'), ((83, 39, 83, 54), 'keras.layers.Dense', 'layers.Dense', ({(83, 52, 83, 53): '(3)'}, {}), '(3)', False, 'from keras import layers\n'), ((110, 8, 110, 58), 'keras.layers.BatchNormalization', 'layers.BatchNormalization', (), '', False, 'from keras import layers\n'), ((119, 41, 119, 57), 'numpy.array', 'np.array', ({(119, 50, 119, 56): '[0, 1]'}, {}), '([0, 1])', True, 'import numpy as np\n'), ((121, 51, 121, 67), 'numpy.array', 'np.array', ({(121, 60, 121, 66): '[0, 0]'}, {}), '([0, 0])', True, 'import numpy as np\n'), ((122, 51, 122, 67), 'numpy.array', 'np.array', ({(122, 60, 122, 66): '[1, 1]'}, {}), '([1, 1])', True, 'import numpy as np\n'), ((138, 33, 138, 53), 'keras.layers.Dropout', 'layers.Dropout', ({(138, 48, 138, 52): '0.999'}, {}), '(0.999)', False, 'from keras import layers\n'), ((166, 39, 166, 77), 'keras.layers.Embedding', 'layers.Embedding', (), '', False, 'from keras import layers\n'), ((169, 39, 169, 81), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', (), '', False, 'from keras import layers\n'), ((170, 39, 170, 82), 'keras.layers.SimpleRNN', 'layers.SimpleRNN', (), '', False, 'from keras import layers\n'), ((161, 21, 161, 32), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((161, 46, 161, 57), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((197, 39, 197, 69), 'keras.layers.Masking', 'layers.Masking', (), '', False, 'from keras import layers\n'), ((199, 39, 199, 54), 'keras.layers.Dense', 'layers.Dense', ({(199, 52, 199, 53): '(5)'}, {}), '(5)', False, 'from keras import layers\n'), ((219, 8, 219, 48), 'keras.layers.Dense', 'layers.Dense', (), '', False, 'from keras import layers\n'), ((229, 8, 229, 50), 'keras.layers.Dense', 
'layers.Dense', (), '', False, 'from keras import layers\n'), ((373, 20, 373, 31), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((373, 47, 373, 58), 'keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from keras import backend as K\n'), ((462, 22, 462, 54), 'keras.backend.dot', 'K.dot', ({(462, 28, 462, 34): 'inputs', (462, 36, 462, 53): 'self.input_kernel'}, {}), '(inputs, self.input_kernel)', True, 'from keras import backend as K\n'), ((463, 22, 463, 63), 'keras.backend.dot', 'K.dot', ({(463, 28, 463, 39): 'prev_output', (463, 41, 463, 62): 'self.recurrent_kernel'}, {}), '(prev_output, self.recurrent_kernel)', True, 'from keras import backend as K\n'), ((464, 22, 464, 59), 'keras.backend.dot', 'K.dot', ({(464, 28, 464, 36): 'constant', (464, 38, 464, 58): 'self.constant_kernel'}, {}), '(constant, self.constant_kernel)', True, 'from keras import backend as K\n'), ((479, 39, 479, 48), 'keras.layers.RNN', 'RNN', ({(479, 43, 479, 47): 'cell'}, {}), '(cell)', False, 'from keras.layers import RNN\n'), ((484, 9, 484, 28), 'numpy.zeros', 'np.zeros', ({(484, 18, 484, 27): '(6, 5, 5)'}, {}), '((6, 5, 5))', True, 'import numpy as np\n'), ((484, 30, 484, 46), 'numpy.zeros', 'np.zeros', ({(484, 39, 484, 45): '(6, 3)'}, {}), '((6, 3))', True, 'import numpy as np\n'), ((495, 51, 495, 72), 'copy.deepcopy', 'copy.deepcopy', ({(495, 65, 495, 71): 'config'}, {}), '(config)', False, 'import copy\n'), ((504, 51, 504, 72), 'copy.deepcopy', 'copy.deepcopy', ({(504, 65, 504, 71): 'config'}, {}), '(config)', False, 'import copy\n'), ((544, 22, 544, 54), 'keras.backend.dot', 'K.dot', ({(544, 28, 544, 34): 'inputs', (544, 36, 544, 53): 'self.input_kernel'}, {}), '(inputs, self.input_kernel)', True, 'from keras import backend as K\n'), ((545, 22, 545, 63), 'keras.backend.dot', 'K.dot', ({(545, 28, 545, 39): 'prev_output', (545, 41, 545, 62): 'self.recurrent_kernel'}, {}), '(prev_output, self.recurrent_kernel)', True, 'from keras import backend as K\n'), ((546, 22, 546, 59), 'keras.backend.dot', 'K.dot', ({(546, 28, 546, 36): 'constant', (546, 38, 546, 58): 'self.constant_kernel'}, {}), '(constant, self.constant_kernel)', True, 'from keras import backend as K\n'), ((563, 39, 563, 48), 'keras.layers.RNN', 'RNN', ({(563, 43, 563, 47): 'cell'}, {}), '(cell)', False, 'from keras.layers import RNN\n'), ((568, 9, 568, 28), 'numpy.zeros', 'np.zeros', ({(568, 18, 568, 27): '(6, 5, 5)'}, {}), '((6, 5, 5))', True, 'import numpy as np\n'), ((568, 30, 568, 47), 'numpy.zeros', 'np.zeros', ({(568, 39, 568, 46): '(6, 32)'}, {}), '((6, 32))', True, 'import numpy as np\n'), ((569, 9, 569, 26), 'numpy.zeros', 'np.zeros', ({(569, 18, 569, 25): '(6, 32)'}, {}), '((6, 32))', True, 'import numpy as np\n'), ((569, 28, 569, 44), 'numpy.zeros', 'np.zeros', ({(569, 37, 569, 43): '(6, 3)'}, {}), '((6, 3))', True, 'import numpy as np\n'), ((582, 51, 582, 72), 'copy.deepcopy', 'copy.deepcopy', ({(582, 65, 582, 71): 'config'}, {}), '(config)', False, 'import copy\n'), ((596, 51, 596, 72), 'copy.deepcopy', 'copy.deepcopy', ({(596, 65, 596, 71): 'config'}, {}), '(config)', False, 'import copy\n'), ((329, 38, 329, 73), 'numpy.concatenate', 'np.concatenate', (), '', True, 'import numpy as np\n')]
code-annotator/tornado-annotated
src/tornado-3.2.2/tornado/platform/common.py
78fa3ab3b87a559c1db9ec11d86d79f6bf47853c
"""Lowest-common-denominator implementations of platform functionality.""" from __future__ import absolute_import, division, print_function, with_statement import errno import socket from tornado.platform import interface class Waker(interface.Waker): """Create an OS independent asynchronous pipe. For use on platforms that don't have os.pipe() (or where pipes cannot be passed to select()), but do have sockets. This includes Windows and Jython. """ def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the trigger sends 1 byte, # and we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count += 1 # Bind to a local port; for efficiency, let the OS pick # a free port for us. # Unfortunately, stress tests showed that we may not # be able to connect to that port ("Address already in # use") despite that the OS picked it. This appears # to be a race bug in the Windows socket implementation. # So we loop until a connect() succeeds (almost always # on the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind(("127.0.0.1", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break # success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # "Address already in use" is the only error # I've seen on two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in use') # assert count <= 2 # never triggered in Tim's tests if count >= 10: # I've never seen it go above 2 a.close() self.writer.close() raise socket.error("Cannot bind trigger!") # Close `a` and try again. Note: I originally put a short # sleep() here, but it didn't appear to help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b"x") except (IOError, socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def close(self): self.reader.close() self.writer.close()
[((20, 22, 20, 37), 'socket.socket', 'socket.socket', ({}, {}), '()', False, 'import socket\n'), ((38, 16, 38, 31), 'socket.socket', 'socket.socket', ({}, {}), '()', False, 'import socket\n'), ((57, 26, 57, 62), 'socket.error', 'socket.error', ({(57, 39, 57, 61): '"""Cannot bind trigger!"""'}, {}), "('Cannot bind trigger!')", False, 'import socket\n')]
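The Waker record above implements the classic "self-pipe" trick with a localhost socket pair so that a blocking select() can be interrupted on platforms without os.pipe(). A minimal standalone sketch of how such a Waker can be driven (assuming tornado is installed so the module above is importable; the helper thread and timing are illustrative only, not part of the original file):

# Sketch: wake a blocking select() from another thread using the Waker above.
import select
import threading
import time

from tornado.platform.common import Waker

waker = Waker()

def poke_later():
    time.sleep(1)
    waker.wake()      # sends one byte through the writer socket

threading.Thread(target=poke_later).start()

# select() blocks until the waker's reader socket becomes readable.
readable, _, _ = select.select([waker.fileno()], [], [])
waker.consume()       # drain the byte(s) so the fd is quiet again
waker.close()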
poster515/BlinkyTape_Python
bathymetry_blink/bathymetry_blink.py
edc2f7e43fbf07dbfdeba60da7acb7ae7a3707d0
""" This script will modulate the blinky lights using the following algorithm: 1) uses user-provided location to obtain row of pixel data from bathy image 2) samples a 'number of LEDs' number of pixels from that row 3) shifts the sampled row data to center it at the location specified by user 4) displays resulting pixels on Blinky Tape 5) shifts next row by a given latitude, also specified by user 6) sleeps for user-specified period of time Uses the following arguments: -l/--location: tuple Location of the user in tuple(lat, lon). This represents the center of the LED strip. Defaults to (0, 0) -u/--update-interval: int Update interval of the script, in minutes. Defaults to 10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate. May be 0, but this will result in a never-changing LEDs. -i/--image: str Name of the PNG image that contains the color coded pathymetric data. The file current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may alternatively edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 Joseph Post (https://joeycodes.dev) MIT Licensed """ import optparse import json from blinkytape import BlinkyTape from time import sleep from PIL import Image import numpy as np import sys MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters with open("./bathymetry_blink/bathy_config.json") as f: config = json.load(f) # Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="portname", help="serial port (ex: /dev/ttyACM0)", default=config["port"]) parser.add_option("-l", "--location", dest="location", help="Location of the center of the LED strip (ex: 70,-110)", default=config["location"]) parser.add_option("-u", "--update-rate", dest="update_rate", help="How often to update elevation profile (mins) (ex: 5)", default=config["update_rate"]) parser.add_option("-d", "--delta-latitude", dest="delta_latitude", help="Change in latitude during update (ex: 5)", default=config["delta_latitude"]) parser.add_option("-n", "--num-leds", dest="num_leds", help="Number of LEDs in strip (ex: 60)", default=config["num_leds"]) parser.add_option("-i", "--image", dest="image_name", help="Name of the map/bathymetry image (ex: ./mapserv.png)", default=config["image"]) (options, args) = parser.parse_args() if args: print("Unknown parameters: " + args) # grab the values provided by user (or defaults) port = options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual indication that it works, for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first, load image im = Image.open(i_name) # Can be many different formats. 
cols, rows = im.size a = np.asarray(im) # of shape (rows, cols, channels) # map loc latitude to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows - 0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols - 0) + 0))) # update the location of the next row of elevation data to take loc[0] += delta loc[0] = ((loc[0] + 90) % 180) - 90 # wraps to next pole if overflow print("Lat index: " + str(latitude_index)) print("Lon index: " + str(longitude_index)) print("Next latitude: " + str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt for pixel in output_pixels: print("Sending r: {}, g: {}, b: {}".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables for memory management del a del im # Tape resets to stored pattern after a few seconds of inactivity sleep(rate * 60) # Wait specified number of minutes # sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print("Keyboard interrupt, ending program.") sys.exit() except RuntimeError as e: print("Encountered runtime error: " + e.args[0]) # flush any incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit("Error count exceeds that allowed.")
[((54, 9, 54, 32), 'optparse.OptionParser', 'optparse.OptionParser', ({}, {}), '()', False, 'import optparse\n'), ((87, 5, 87, 29), 'blinkytape.BlinkyTape', 'BlinkyTape', ({(87, 16, 87, 20): 'port', (87, 22, 87, 28): 'n_leds'}, {}), '(port, n_leds)', False, 'from blinkytape import BlinkyTape\n'), ((90, 0, 90, 8), 'time.sleep', 'sleep', ({(90, 6, 90, 7): '(2)'}, {}), '(2)', False, 'from time import sleep\n'), ((51, 13, 51, 25), 'json.load', 'json.load', ({(51, 23, 51, 24): 'f'}, {}), '(f)', False, 'import json\n'), ((95, 13, 95, 31), 'PIL.Image.open', 'Image.open', ({(95, 24, 95, 30): 'i_name'}, {}), '(i_name)', False, 'from PIL import Image\n'), ((97, 12, 97, 26), 'numpy.asarray', 'np.asarray', ({(97, 23, 97, 25): 'im'}, {}), '(im)', True, 'import numpy as np\n'), ((115, 24, 115, 67), 'numpy.take', 'np.take', (), '', True, 'import numpy as np\n'), ((118, 24, 118, 71), 'numpy.roll', 'np.roll', (), '', True, 'import numpy as np\n'), ((133, 8, 133, 24), 'time.sleep', 'sleep', ({(133, 14, 133, 23): '(rate * 60)'}, {}), '(rate * 60)', False, 'from time import sleep\n'), ((138, 8, 138, 18), 'sys.exit', 'sys.exit', ({}, {}), '()', False, 'import sys\n'), ((148, 12, 148, 57), 'sys.exit', 'sys.exit', ({(148, 21, 148, 56): '"""Error count exceeds that allowed."""'}, {}), "('Error count exceeds that allowed.')", False, 'import sys\n')]
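The index arithmetic inside the main loop of the script above (map latitude/longitude onto pixel row/column, sample one pixel per LED, then roll the row so it is centred on the user's longitude) can be isolated into a small self-contained sketch; the random array here is a stand-in for the GEBCO bathymetry image, and the coordinates are illustrative values only:

# Sketch of the sampling/centering steps, with random data in place of the PNG.
import numpy as np

rows, cols, n_leds = 600, 1200, 60
image = np.random.randint(0, 256, size=(rows, cols, 3), dtype=np.uint8)

lat, lon = 48.4, -4.5   # example location (illustrative)

# Same linear mapping as the script: [-90, 90] -> [0, rows), [-180, 180] -> [0, cols)
lat_idx = min(rows - 1, max(0, int((lat + 90) / 180 * rows)))
lon_idx = min(cols - 1, max(0, int((lon + 180) / 360 * cols)))

# One pixel per LED, evenly spaced across the row, then centre on the longitude.
indices = [int(x * (cols / n_leds)) for x in range(n_leds)]
led_pixels = np.take(image[lat_idx], indices, axis=0)
led_pixels = np.roll(led_pixels, lon_idx, axis=0)

print(led_pixels.shape)   # (60, 3): one RGB triple per LED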
SFDigitalServices/pts-dispatcher-microservice-py
service/transforms/export_submissions.py
80ec68d9d7f3f120a708717ed92c8b5a16742ff3
""" Export Submissions Transform module """ #pylint: disable=too-few-public-methods import pandas as pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): """ Transform for Export Submissions """ def transform(self, data, sep): """ transform submissions from export """ output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i for i in output if i is not None] output = self.normalize(output) output = self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self, submission): """ Get data from submission object """ # skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten list values if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key])) else: file_names = [] for index, val in enumerate(data[key]): # if storage, concat filename if 'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0: output[key] = ', '.join(file_names) # flatten multi select values elif isinstance(data[key], dict): # building use code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key] return output def normalize(self, data): """ Normalize data into a flat structure into DataFrame """ dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): """ Return CSV from DataFrame """ return dataframe.to_csv(index=False, sep=sep, line_terminator='\r\n') def pretty_format(self, data): """ Pretty format data fields """ output = {} if data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed Fire Rating if field_key == 'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key], output) # format phone numbers and building application number elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\n', '\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key) output = self.reorder_fields(output) return output
[((74, 20, 74, 43), 'pandas.json_normalize', 'pd.json_normalize', ({(74, 38, 74, 42): 'data'}, {}), '(data)', True, 'import pandas as pd\n')]
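The normalize()/to_csv() pair in the transform above reduces to pandas' json_normalize followed by DataFrame.to_csv. A stand-alone illustration with made-up submission dicts (the field names are invented for the example; the source additionally passes line_terminator='\r\n', a keyword renamed to lineterminator in newer pandas releases):

# Stand-alone illustration of the normalize()/to_csv() steps above.
import pandas as pd

records = [
    {"id": "a1", "created": "2021-01-02", "permitType": "newConstruction"},
    {"id": "b2", "created": "2021-01-03", "permitType": "alteration"},
]

df = pd.json_normalize(records)   # flat dicts -> one row per record
csv_text = df.to_csv(index=False, sep="|")
print(csv_text)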
mgelbart/ray
python/ray/ml/tests/test_torch_trainer.py
4cec2286572e368a4bd64aae467751a384eff62d
import pytest import torch import ray from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from ray import train from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info # The code after the yield will run as teardown code. ray.shutdown() @pytest.mark.parametrize("num_workers", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result = linear_train_func(config) assert len(result) == epochs assert result[-1]["loss"] < result[0]["loss"] num_workers = num_workers epochs = 3 scaling_config = {"num_workers": num_workers} config = {"lr": 1e-2, "hidden_size": 1, "batch_size": 4, "epochs": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = {"num_workers": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format="pandas", compute="actors" ) assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {"num_workers": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() # If loading from a state dict, a model definition must be passed in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format="pandas", compute="actors" ) assert predictions.count() == 3 if __name__ == "__main__": import pytest import sys sys.exit(pytest.main(["-v", "-x", __file__]))
[((19, 1, 19, 47), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ({(19, 25, 19, 38): '"""num_workers"""', (19, 40, 19, 46): '[1, 2]'}, {}), "('num_workers', [1, 2])", False, 'import pytest\n'), ((13, 19, 13, 39), 'ray.init', 'ray.init', (), '', False, 'import ray\n'), ((16, 4, 16, 18), 'ray.shutdown', 'ray.shutdown', ({}, {}), '()', False, 'import ray\n'), ((30, 14, 34, 5), 'ray.ml.train.integrations.torch.TorchTrainer', 'TorchTrainer', (), '', False, 'from ray.ml.train.integrations.torch import TorchTrainer\n'), ((44, 14, 46, 5), 'ray.ml.train.integrations.torch.TorchTrainer', 'TorchTrainer', (), '', False, 'from ray.ml.train.integrations.torch import TorchTrainer\n'), ((49, 22, 49, 39), 'ray.data.range', 'ray.data.range', ({(49, 37, 49, 38): '3'}, {}), '(3)', False, 'import ray\n'), ((70, 14, 72, 5), 'ray.ml.train.integrations.torch.TorchTrainer', 'TorchTrainer', (), '', False, 'from ray.ml.train.integrations.torch import TorchTrainer\n'), ((88, 22, 88, 39), 'ray.data.range', 'ray.data.range', ({(88, 37, 88, 38): '3'}, {}), '(3)', False, 'import ray\n'), ((22, 17, 22, 42), 'ray.ml.examples.pytorch.torch_linear_example.train_func', 'linear_train_func', ({(22, 35, 22, 41): 'config'}, {}), '(config)', True, 'from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func\n'), ((40, 16, 40, 37), 'torch.nn.Linear', 'torch.nn.Linear', ({(40, 32, 40, 33): '1', (40, 35, 40, 36): '1'}, {}), '(1, 1)', False, 'import torch\n'), ((41, 8, 41, 42), 'ray.train.save_checkpoint', 'train.save_checkpoint', (), '', False, 'from ray import train\n'), ((67, 8, 67, 42), 'ray.train.save_checkpoint', 'train.save_checkpoint', (), '', False, 'from ray import train\n'), ((76, 9, 76, 34), 'pytest.raises', 'pytest.raises', ({(76, 23, 76, 33): 'ValueError'}, {}), '(ValueError)', False, 'import pytest\n'), ((77, 8, 77, 57), 'ray.ml.predictors.integrations.torch.TorchPredictor.from_checkpoint', 'TorchPredictor.from_checkpoint', ({(77, 39, 77, 56): 'result.checkpoint'}, {}), '(result.checkpoint)', False, 'from ray.ml.predictors.integrations.torch import TorchPredictor\n'), ((99, 13, 99, 48), 'pytest.main', 'pytest.main', ({(99, 25, 99, 47): "['-v', '-x', __file__]"}, {}), "(['-v', '-x', __file__])", False, 'import pytest\n'), ((53, 24, 53, 73), 'ray.ml.predictors.integrations.torch.TorchPredictor.from_checkpoint', 'TorchPredictor.from_checkpoint', ({(53, 55, 53, 72): 'result.checkpoint'}, {}), '(result.checkpoint)', False, 'from ray.ml.predictors.integrations.torch import TorchPredictor\n'), ((66, 16, 66, 37), 'torch.nn.Linear', 'torch.nn.Linear', ({(66, 32, 66, 33): '1', (66, 35, 66, 36): '1'}, {}), '(1, 1)', False, 'import torch\n'), ((82, 41, 82, 62), 'torch.nn.Linear', 'torch.nn.Linear', ({(82, 57, 82, 58): '1', (82, 60, 82, 61): '1'}, {}), '(1, 1)', False, 'import torch\n')]
OpenIxia/ixnetwork_restpy
uhd_restpy/testplatform/sessions/ixnetwork/quicktest/learnframes_58e01d83db5d99bcabff902f5cf6ec51.py
f628db450573a104f327cf3c737ca25586e067ae
# MIT LICENSE # # Copyright 1997 - 2020 by IXIA Keysight # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import Files from typing import List, Any, Union class LearnFrames(Base): """The learning frames that IxNetwork sends during the test. The LearnFrames class encapsulates a required learnFrames resource which will be retrieved from the server every time the property is accessed. """ __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () -> bool """ Returns ------- - bool: If true, enables fast path transmit. """ return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int """ Returns ------- - number: Specifies the size of the learning frames in the fast path. """ return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () -> int """ Returns ------- - number: Specifies the number of learn frames that IxNetwork sends through fast path. """ return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () -> int """ Returns ------- - number: Specifies the rate at which IxNetwork sends learn frames through fast path. 
""" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () -> int """ Returns ------- - number: Specifies the size of the learning frames. """ return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () -> str """ Returns ------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose how frequently IxNetwork sends learning frames during the test. """ return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int """ Returns ------- - number: Specifies the number of learning frames that IxNetwork sends for each address. """ return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int """ Returns ------- - number: Specifies the rate at which IxNetwork sends learn frames to the DUT. """ return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool """ Returns ------- - bool: Sends learning frames to MAC address only. """ return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool """ Returns ------- - bool: Sends router solicitation messages. """ return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () -> int """ Returns ------- - number: Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports. 
""" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int """ Returns ------- - number: Specifies the length of time in ms that IxNetwork pauses before sending all the """ return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str, int, int, bool, bool, int, int) -> LearnFrames """Updates learnFrames resource on the server. Args ---- - FastPathEnable (bool): If true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies the size of the learning frames in the fast path. - FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork sends through fast path. - FastPathRate (number): Specifies the rate at which IxNetwork sends learn frames through fast path. - LearnFrameSize (number): Specifies the size of the learning frames. - LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends learning frames during the test. - LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each address. - LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that IxNetwork pauses before sending all the Raises ------ - ServerError: The server has encountered an uncategorized error condition """ return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None """Executes the apply operation on the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None """Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. 
Any subsequent rest api calls made through the Connection class will block until the operation is complete. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] """Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. - Returns bool: Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None """Executes the applyITWizardConfiguration operation on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str, None] """Executes the generateReport operation on the server. Generate a PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. - Returns str: This method is asynchronous and has no return value. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] """Executes the run operation on the server. Starts the specified Quick Test and waits for its execution to finish. The IxNetwork model allows for multiple method Signatures with the same name while python does not. 
run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. - Returns list(str): This method is synchronous and returns the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. - Returns list(str): This method is synchronous and returns the result of the test. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) -> None """Executes the start operation on the server. Starts the specified Quick Test. The IxNetwork model allows for multiple method Signatures with the same name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None """Executes the stop operation on the server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] """Executes the waitForTest operation on the server. Waits for the execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. 
Any subsequent rest api calls made through the Connection class will block until the operation is complete. - Returns list(str): This method is synchronous and returns the result of the test. Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition """ payload = { "Arg1": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest', payload=payload, response_object=None)
[]
telminov/sonm-cdn-cms
core/serializers.py
e51107e3baed9e633e54db6cd7f784178f531b4a
from rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields = '__all__'
[]
bernardocuteri/wasp
tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py
05c8f961776dbdbf7afbf905ee00fc262eba51ad
input = """ c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. """ output = """ {a(2), c(2), d(2,2,1), okay(2), p(1)} """
[]
Pzzzzz5142/animal-forest-QQ-group-bot
Pzzzzz/plugins/wm.py
a9141a212a7746ac95d28459ec9cec5b6c188b35
from nonebot import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import time import hmac import random, sys import hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系[email protected] # coding=utf-8 import hashlib import urllib import random @on_command("wm", aliases={"翻译", "translate"}, only_to_me=False) async def wm(session: CommandSession): session.get("token", prompt="请输入你想翻译的句子!") myurl = "/api/trans/vip/translate" q = session.state["token"] fromLang = session.state["fr"] # 原文语种 toLang = session.state["to"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + "?appid=" + bot.config.BAIDUAPI + "&q=" + urllib.parse.quote(q) + "&from=" + fromLang + "&to=" + toLang + "&salt=" + str(salt) + "&sign=" + sign ) async with ClientSession() as sess: async with sess.get("https://fanyi-api.baidu.com" + myurl) as resp: if resp.status != 200: pass ShitAns = await resp.json() try: ans = [i["dst"] for i in ShitAns["trans_result"]] ans = "\n".join(ans) except: session.finish("翻译错误,原因是:" + ShitAns["error_code"]) session.finish("翻译结果为:\n" + ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument("--fr", "-f", type=str, default="no") parser.add_argument("--to", "-t", type=str, default="no") parser.add_argument("token", type=str, default="", nargs="+") argv = parser.parse_args(session.current_arg.split(" ")) arg = " ".join(argv.token) if arg == "": session.pause("输入不能为空哦!") session.state["fr"] = detect(arg) if argv.fr == "no" else argv.fr if session.state["fr"][:2] == "zh": session.state["fr"] = "zh" if argv.to == "no": if session.state["fr"] == "zh": session.state["to"] = "en" else: session.state["to"] = "zh" else: session.state["to"] = argv.to if argv.fr == "no": session.state["fr"] = "auto" session.state["token"] = arg
[((13, 6, 13, 15), 'nonebot.get_bot', 'get_bot', ({}, {}), '()', False, 'from nonebot import get_bot\n'), ((22, 1, 22, 68), 'nonebot.on_command', 'on_command', (), '', False, 'from nonebot import CommandSession, on_command\n'), ((29, 11, 29, 39), 'random.randint', 'random.randint', ({(29, 26, 29, 31): '32768', (29, 33, 29, 38): '65536'}, {}), '(32768, 65536)', False, 'import random\n'), ((47, 15, 47, 30), 'aiohttp.ClientSession', 'ClientSession', ({}, {}), '()', False, 'from aiohttp import ClientSession\n'), ((66, 17, 66, 48), 'nonebot.argparse.ArgumentParser', 'ArgumentParser', (), '', False, 'from nonebot.argparse import ArgumentParser\n'), ((78, 26, 78, 37), 'langdetect.detect', 'detect', ({(78, 33, 78, 36): 'arg'}, {}), '(arg)', False, 'from langdetect import detect, detect_langs\n'), ((37, 10, 37, 31), 'urllib.parse.quote', 'urllib.parse.quote', ({(37, 29, 37, 30): 'q'}, {}), '(q)', False, 'import urllib\n')]
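The request construction in the bot above follows the Baidu translate API convention of signing appid + query + salt + key with MD5 before building the query string. The signing step shown in isolation, with obviously fake credentials (APP_ID and APP_KEY are placeholders, not values from the original file):

# The Baidu translate signing step used above, shown in isolation.
import hashlib
import random
import urllib.parse

APP_ID = "20210000000000000"    # placeholder
APP_KEY = "not_a_real_key"      # placeholder
query = "hello world"

salt = random.randint(32768, 65536)
sign = hashlib.md5((APP_ID + query + str(salt) + APP_KEY).encode()).hexdigest()

params = urllib.parse.urlencode({
    "appid": APP_ID, "q": query, "from": "en", "to": "zh",
    "salt": salt, "sign": sign,
})
url = "https://fanyi-api.baidu.com/api/trans/vip/translate?" + params
print(url)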
ParksProjets/Mips-Applications
home/scripts/memory/lpsolve.py
d4284a5ee357b0e5f348b9af28bb0d90c036ae99
""" LpSolve wrapper. Copyright (C) 2018, Guillaume Gonnet License MIT """ from ctypes import * import sys import os.path as path import platform # Import the DLL ver = ("x86", "x64")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == "win32": lib = windll.LoadLibrary(path.abspath(path.join(here, "dll/lpsolve55-%s.dll" % ver))) elif sys.platform == "linux": lib = cdll.LoadLibrary(path.abspath(path.join(here, "dll/lpsolve55-%s.so" % ver))) else: raise ValueError("Can't load LpSolve library on this platform.") # Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): "The Linear Programming Engine." def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, "Can't construct a new LpSolve model" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): "Add a new constraint into the model." assert const.optype is not None, "You must provide the RHS of constraint" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, "Can't add constraint into model" def objective(self, const): "Set the objective function." lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, "Can't set objective function of model" def update_variables(self): "Update the variable values." ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, "Can't get variable values" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): "Solve the model." lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b"debug-model.lp") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables() return ret class LpVariable(object): "A LpSolve variable." 
def __init__(self, lp, vtype="real"): assert len(lp.vars) < lp.maxvars, "Can't add a variable: " self.index = len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self) self.type = "real" self.retype(vtype) def retype(self, vtype): "Change the type of the variable" if "bin" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == "bin")) elif "int" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == "int")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): "A LpSolve constraint." def __init__(self, numbers, vars): self.numbers = numbers self.vars = vars self.optype = None self.rhs = None def fill_buffers(self, colno, row): "Fill colno and row buffers for calling LpSolve." for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), "Some variables appear several times" return c def __le__(self, val): self.optype, self.rhs = (1, val) return self def __eq__(self, val): self.optype, self.rhs = (3, val) return self def __ge__(self, val): self.optype, self.rhs = (2, val) return self
[((18, 7, 18, 29), 'os.path.dirname', 'path.dirname', ({(18, 20, 18, 28): '__file__'}, {}), '(__file__)', True, 'import os.path as path\n'), ((21, 42, 21, 87), 'os.path.join', 'path.join', ({(21, 52, 21, 56): 'here', (21, 58, 21, 86): "'dll/lpsolve55-%s.dll' % ver"}, {}), "(here, 'dll/lpsolve55-%s.dll' % ver)", True, 'import os.path as path\n'), ((23, 40, 23, 84), 'os.path.join', 'path.join', ({(23, 50, 23, 54): 'here', (23, 56, 23, 83): "'dll/lpsolve55-%s.so' % ver"}, {}), "(here, 'dll/lpsolve55-%s.so' % ver)", True, 'import os.path as path\n')]
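Given the wrapper above, a short hedged usage sketch (it assumes the bundled lpsolve55 shared library loads on the current platform and that lpsolve.py is importable from the path): maximize 3x + 2y subject to x + y <= 4 and x <= 3, with integer variables. Constraints are built with the overloaded * , + and comparison operators defined on LpVariable/LpConstraint, then handed to the engine before the objective is set and the model is solved.

# Usage sketch for the wrapper above (maximization is the solver's default here).
from lpsolve import LpEngine, LpVariable

lp = LpEngine(maxvars=2)

x = LpVariable(lp, vtype="int")
y = LpVariable(lp, vtype="int")

lp.constraint(1 * x + 1 * y <= 4)
lp.constraint(1 * x <= 3)

lp.objective(3 * x + 2 * y)

ret = lp.solve()                  # 0 or 1 means a solution was found
print(ret, x.value, y.value)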
mauroseb/octavia
octavia/tests/unit/controller/worker/v2/tasks/test_database_tasks.py
8f032d884e0f89ac69d5b6e5f5b77d19ee6eb1d7
# Copyright 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import random from cryptography import fernet import mock from oslo_db import exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import constants from octavia.common import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.0.5.2' VRRP_IP = '192.0.5.3' HA_IP = '192.0.5.4' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') 
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception 
mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom) fix once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() 
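        # The revert path below is only smoke-tested here; its assertions
        # remain commented out under the TODO that follows.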
delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def 
test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() 
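        # Marking a listener DELETED goes straight through the listener
        # repository; both revert paths below (normal and with a repository
        # exception) must still flip provisioning_status to ERROR.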
mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) 
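        # With the default mark_subobjects=False, only the load balancer row
        # is updated; listeners must remain untouched.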
self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def 
test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( 
[mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, 
mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', 
MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, 
mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert("BADRESULT", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception("TESTEXCEPT")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert("BADRESULT", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock 
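        # GetListenersFromLoadbalancer re-reads each listener from the
        # repository and returns a list of dicts keyed by
        # constants.LISTENER_ID.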
_loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, 
mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
                                        MarkL7PolicyPendingUpdateInDB())
        mark_l7policy_pending_update.execute(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', L7POLICY_ID,
            provisioning_status=constants.PENDING_UPDATE)

        # Test the revert
        mock_l7policy_repo_update.reset_mock()
        mark_l7policy_pending_update.revert(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7policy_repo_update.reset_mock()
        mock_l7policy_repo_update.side_effect = Exception('fail')
        mark_l7policy_pending_update.revert(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_active_in_db(
            self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB())
        mark_l7rule_active.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', L7RULE_ID,
            provisioning_status=constants.ACTIVE,
            operating_status=constants.ONLINE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_active.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_active.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_pending_create_in_db(
            self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_l7rule_pending_create = (database_tasks.
                                      MarkL7RulePendingCreateInDB())
        mark_l7rule_pending_create.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', L7RULE_ID,
            provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_pending_create.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_pending_create.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_pending_delete_in_db(
            self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_l7rule_pending_delete = (database_tasks.
                                      MarkL7RulePendingDeleteInDB())
        mark_l7rule_pending_delete.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', L7RULE_ID,
            provisioning_status=constants.PENDING_DELETE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_pending_delete.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_pending_delete.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_pending_update_in_db(
            self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_l7rule_pending_update = (database_tasks.
                                      MarkL7RulePendingUpdateInDB())
        mark_l7rule_pending_update.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', L7RULE_ID,
            provisioning_status=constants.PENDING_UPDATE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_pending_update.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_pending_update.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update')
    def test_mark_member_active_in_db(
            self, mock_member_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_member_active = (database_tasks.MarkMemberActiveInDB())
        mark_member_active.execute(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE)

        # Test the revert
        mock_member_repo_update.reset_mock()
        mark_member_active.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_member_repo_update.reset_mock()
        mock_member_repo_update.side_effect = Exception('fail')
        mark_member_active.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update')
    def test_mark_member_pending_create_in_db(
            self, mock_member_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_member_pending_create = (database_tasks.
                                      MarkMemberPendingCreateInDB())
        mark_member_pending_create.execute(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_member_repo_update.reset_mock()
        mark_member_pending_create.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_member_repo_update.reset_mock()
        mock_member_repo_update.side_effect = Exception('fail')
        mark_member_pending_create.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update')
    def test_mark_member_pending_delete_in_db(
            self, mock_member_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_member_pending_delete = (database_tasks.
                                      MarkMemberPendingDeleteInDB())
        mark_member_pending_delete.execute(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE)

        # Test the revert
        mock_member_repo_update.reset_mock()
        mark_member_pending_delete.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_member_repo_update.reset_mock()
        mock_member_repo_update.side_effect = Exception('fail')
        mark_member_pending_delete.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update')
    def test_mark_member_pending_update_in_db(
            self, mock_member_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_member_pending_update = (database_tasks.
                                      MarkMemberPendingUpdateInDB())
        mark_member_pending_update.execute(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE)

        # Test the revert
        mock_member_repo_update.reset_mock()
        mark_member_pending_update.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_member_repo_update.reset_mock()
        mock_member_repo_update.side_effect = Exception('fail')
        mark_member_pending_update.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.PoolRepository.update')
    def test_mark_pool_active_in_db(
            self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_pool_active = (database_tasks.MarkPoolActiveInDB())
        mark_pool_active.execute(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', POOL_ID, provisioning_status=constants.ACTIVE)

        # Test the revert
        mock_pool_repo_update.reset_mock()
        mark_pool_active.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        mark_pool_active.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.PoolRepository.update')
    def test_mark_pool_pending_create_in_db(
            self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB())
        mark_pool_pending_create.execute(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_pool_repo_update.reset_mock()
        mark_pool_pending_create.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        mark_pool_pending_create.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.PoolRepository.update')
    def test_mark_pool_pending_delete_in_db(
            self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB())
        mark_pool_pending_delete.execute(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE)

        # Test the revert
        mock_pool_repo_update.reset_mock()
        mark_pool_pending_delete.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        mark_pool_pending_delete.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID,
            provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.PoolRepository.update')
    def test_mark_pool_pending_update_in_db(
            self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_pool_pending_update = (database_tasks.
                                    MarkPoolPendingUpdateInDB())
        mark_pool_pending_update.execute(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE)

        # Test the revert
        mock_pool_repo_update.reset_mock()
        mark_pool_pending_update.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        mark_pool_pending_update.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members')
    def test_update_pool_members_operating_status_in_db(
            self, mock_member_repo_update_pool_members, mock_generate_uuid,
            mock_LOG, mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB()
        update_members.execute(POOL_ID, constants.ONLINE)

        mock_member_repo_update_pool_members.assert_called_once_with(
            'TEST', POOL_ID, operating_status=constants.ONLINE)
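The tests above all exercise the same execute()/revert() contract on the "mark X in DB" tasks: execute() pushes a transitional provisioning status through a repository update() call, while revert() marks the same object ERROR, even when the repository raises. The snippet below is a minimal, hypothetical sketch of a task with that shape, written against a generic repository object; it illustrates the pattern under test and is not the actual octavia implementation (class and attribute names here are invented for the example).

class MarkResourcePendingDeleteInDBSketch(object):
    """Hypothetical task mirroring the execute()/revert() pattern tested above."""

    def __init__(self, session, resource_repo):
        # 'session' stands in for the value returned by the mocked
        # get_session() ('TEST' in the assertions above); 'resource_repo' is
        # anything exposing update(session, id, **fields), such as the mocked
        # repository classes.
        self.session = session
        self.repo = resource_repo

    def execute(self, resource_id):
        # Happy path: move the resource into the transitional state.
        self.repo.update(self.session, resource_id,
                         provisioning_status='PENDING_DELETE')

    def revert(self, resource_id):
        # Failure path: best-effort mark the resource ERROR. Swallowing the
        # exception lets the surrounding flow keep reverting, which is what
        # the "revert with exception" branches above exercise.
        try:
            self.repo.update(self.session, id=resource_id,
                             provisioning_status='ERROR')
        except Exception:
            pass

With a unittest.mock.MagicMock() supplied as resource_repo, the same assert_called_once_with() checks used in the tests apply directly to this sketch.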
[((33, 9, 33, 34), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((34, 13, 34, 38), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((35, 8, 35, 33), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((36, 18, 36, 43), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((38, 14, 38, 39), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((39, 10, 39, 35), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((40, 8, 40, 33), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((41, 12, 41, 37), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((42, 10, 42, 35), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((43, 12, 43, 37), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((44, 15, 44, 40), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((45, 13, 45, 38), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((46, 14, 46, 39), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((47, 12, 47, 37), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((52, 10, 52, 31), 'random.randrange', 'random.randrange', ({(52, 27, 52, 30): '255'}, {}), '(255)', False, 'import random\n'), ((53, 16, 53, 37), 'random.randrange', 'random.randrange', ({(53, 33, 53, 36): '100'}, {}), '(100)', False, 'import random\n'), ((55, 11, 55, 36), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((56, 17, 56, 42), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((58, 16, 58, 32), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((70, 21, 70, 37), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((73, 17, 73, 33), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((75, 15, 75, 31), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((77, 17, 77, 33), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((78, 25, 79, 37), 'mock.MagicMock', 'mock.MagicMock', (), '', False, 'import mock\n'), ((82, 19, 82, 50), 'mock.Mock', 'mock.Mock', (), '', False, 'import mock\n'), ((83, 12, 83, 28), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((87, 19, 87, 35), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((88, 13, 88, 29), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((89, 16, 89, 32), 
'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((96, 1, 96, 63), 'mock.patch', 'mock.patch', ({(96, 12, 96, 62): '"""octavia.db.repositories.AmphoraRepository.delete"""'}, {}), "('octavia.db.repositories.AmphoraRepository.delete')", False, 'import mock\n'), ((97, 1, 97, 63), 'mock.patch', 'mock.patch', ({(97, 12, 97, 62): '"""octavia.db.repositories.AmphoraRepository.update"""'}, {}), "('octavia.db.repositories.AmphoraRepository.update')", False, 'import mock\n'), ((98, 1, 98, 64), 'mock.patch', 'mock.patch', ({(98, 12, 98, 63): '"""octavia.db.repositories.ListenerRepository.update"""'}, {}), "('octavia.db.repositories.ListenerRepository.update')", False, 'import mock\n'), ((99, 1, 99, 68), 'mock.patch', 'mock.patch', ({(99, 12, 99, 67): '"""octavia.db.repositories.LoadBalancerRepository.update"""'}, {}), "('octavia.db.repositories.LoadBalancerRepository.update')", False, 'import mock\n'), ((100, 1, 100, 62), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((101, 1, 101, 68), 'mock.patch', 'mock.patch', ({(101, 12, 101, 67): '"""octavia.controller.worker.v2.tasks.database_tasks.LOG"""'}, {}), "('octavia.controller.worker.v2.tasks.database_tasks.LOG')", False, 'import mock\n'), ((102, 1, 102, 70), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((138, 5, 139, 43), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((181, 5, 181, 68), 'mock.patch', 'mock.patch', ({(181, 16, 181, 67): '"""octavia.db.repositories.ListenerRepository.delete"""'}, {}), "('octavia.db.repositories.ListenerRepository.delete')", False, 'import mock\n'), ((204, 5, 204, 73), 'mock.patch', 'mock.patch', ({(204, 16, 204, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((205, 5, 205, 73), 'mock.patch', 'mock.patch', ({(205, 16, 205, 72): '"""octavia.db.repositories.HealthMonitorRepository.delete"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.delete')", False, 'import mock\n'), ((238, 5, 238, 73), 'mock.patch', 'mock.patch', ({(238, 16, 238, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((239, 5, 239, 73), 'mock.patch', 'mock.patch', ({(239, 16, 239, 72): '"""octavia.db.repositories.HealthMonitorRepository.delete"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.delete')", False, 'import mock\n'), ((271, 5, 271, 66), 'mock.patch', 'mock.patch', ({(271, 16, 271, 65): '"""octavia.db.repositories.MemberRepository.delete"""'}, {}), "('octavia.db.repositories.MemberRepository.delete')", False, 'import mock\n'), ((299, 5, 299, 64), 'mock.patch', 'mock.patch', ({(299, 16, 299, 63): '"""octavia.db.repositories.PoolRepository.delete"""'}, {}), "('octavia.db.repositories.PoolRepository.delete')", False, 'import mock\n'), ((328, 5, 328, 68), 'mock.patch', 'mock.patch', ({(328, 16, 328, 67): '"""octavia.db.repositories.L7PolicyRepository.delete"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.delete')", False, 'import mock\n'), ((357, 5, 357, 66), 'mock.patch', 'mock.patch', ({(357, 16, 357, 65): '"""octavia.db.repositories.L7RuleRepository.delete"""'}, {}), "('octavia.db.repositories.L7RuleRepository.delete')", False, 'import mock\n'), ((386, 5, 387, 43), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((407, 5, 408, 48), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((428, 5, 429, 
48), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((430, 5, 430, 63), 'mock.patch', 'mock.patch', ({(430, 16, 430, 62): '"""octavia.db.repositories.VipRepository.update"""'}, {}), "('octavia.db.repositories.VipRepository.update')", False, 'import mock\n'), ((516, 5, 516, 70), 'mock.patch', 'mock.patch', ({(516, 16, 516, 69): '"""octavia.db.repositories.AmphoraRepository.associate"""'}, {}), "('octavia.db.repositories.AmphoraRepository.associate')", False, 'import mock\n'), ((552, 5, 554, 50), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((595, 5, 597, 50), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((640, 5, 641, 43), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((642, 5, 643, 48), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((664, 5, 665, 43), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((666, 5, 667, 48), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((916, 5, 916, 64), 'mock.patch', 'mock.patch', ({(916, 16, 916, 63): '"""octavia.db.repositories.AmphoraRepository.get"""'}, {}), "('octavia.db.repositories.AmphoraRepository.get')", False, 'import mock\n'), ((1015, 5, 1016, 50), 'mock.patch', 'mock.patch', ({(1015, 16, 1016, 49): '"""octavia.db.repositories.ListenerRepository.prov_status_active_if_not_error"""'}, {}), "(\n 'octavia.db.repositories.ListenerRepository.prov_status_active_if_not_error'\n )", False, 'import mock\n'), ((1116, 5, 1117, 40), 'mock.patch', 'mock.patch', (), '', False, 'import mock\n'), ((1275, 5, 1275, 64), 'mock.patch', 'mock.patch', ({(1275, 16, 1275, 63): '"""octavia.db.repositories.PoolRepository.update"""'}, {}), "('octavia.db.repositories.PoolRepository.update')", False, 'import mock\n'), ((1276, 5, 1276, 66), 'mock.patch', 'mock.patch', ({(1276, 16, 1276, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((1277, 5, 1277, 73), 'mock.patch', 'mock.patch', ({(1277, 16, 1277, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((1278, 5, 1278, 68), 'mock.patch', 'mock.patch', ({(1278, 16, 1278, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((1279, 5, 1279, 66), 'mock.patch', 'mock.patch', ({(1279, 16, 1279, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((1462, 5, 1462, 73), 'mock.patch', 'mock.patch', ({(1462, 16, 1462, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((1538, 5, 1538, 63), 'mock.patch', 'mock.patch', ({(1538, 16, 1538, 62): '"""octavia.db.repositories.VipRepository.update"""'}, {}), "('octavia.db.repositories.VipRepository.update')", False, 'import mock\n'), ((1600, 5, 1600, 66), 'mock.patch', 'mock.patch', ({(1600, 16, 1600, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((1639, 5, 1640, 66), 'mock.patch', 'mock.patch', ({(1640, 8, 1640, 65): '"""octavia.db.repositories.Repositories.update_pool_and_sp"""'}, {}), "('octavia.db.repositories.Repositories.update_pool_and_sp')", False, 'import mock\n'), ((1682, 5, 
1682, 68), 'mock.patch', 'mock.patch', ({(1682, 16, 1682, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((1721, 5, 1721, 66), 'mock.patch', 'mock.patch', ({(1721, 16, 1721, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((1722, 5, 1722, 68), 'mock.patch', 'mock.patch', ({(1722, 16, 1722, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((1849, 5, 1849, 64), 'mock.patch', 'mock.patch', ({(1849, 16, 1849, 63): '"""octavia.db.repositories.AmphoraRepository.get"""'}, {}), "('octavia.db.repositories.AmphoraRepository.get')", False, 'import mock\n'), ((1872, 5, 1872, 65), 'mock.patch', 'mock.patch', ({(1872, 16, 1872, 64): '"""octavia.db.repositories.ListenerRepository.get"""'}, {}), "('octavia.db.repositories.ListenerRepository.get')", False, 'import mock\n'), ((1902, 5, 1902, 69), 'mock.patch', 'mock.patch', ({(1902, 16, 1902, 68): '"""octavia.db.repositories.VRRPGroupRepository.create"""'}, {}), "('octavia.db.repositories.VRRPGroupRepository.create')", False, 'import mock\n'), ((1926, 5, 1926, 73), 'mock.patch', 'mock.patch', ({(1926, 16, 1926, 72): '"""octavia.db.repositories.AmphoraHealthRepository.delete"""'}, {}), "('octavia.db.repositories.AmphoraHealthRepository.delete')", False, 'import mock\n'), ((1941, 5, 1941, 73), 'mock.patch', 'mock.patch', ({(1941, 16, 1941, 72): '"""octavia.db.repositories.AmphoraHealthRepository.delete"""'}, {}), "('octavia.db.repositories.AmphoraHealthRepository.delete')", False, 'import mock\n'), ((1958, 5, 1958, 73), 'mock.patch', 'mock.patch', ({(1958, 16, 1958, 72): '"""octavia.db.repositories.AmphoraHealthRepository.update"""'}, {}), "('octavia.db.repositories.AmphoraHealthRepository.update')", False, 'import mock\n'), ((1973, 5, 1973, 73), 'mock.patch', 'mock.patch', ({(1973, 16, 1973, 72): '"""octavia.db.repositories.AmphoraHealthRepository.update"""'}, {}), "('octavia.db.repositories.AmphoraHealthRepository.update')", False, 'import mock\n'), ((2016, 5, 2016, 73), 'mock.patch', 'mock.patch', ({(2016, 16, 2016, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((2055, 5, 2055, 73), 'mock.patch', 'mock.patch', ({(2055, 16, 2055, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((2095, 5, 2095, 73), 'mock.patch', 'mock.patch', ({(2095, 16, 2095, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((2135, 5, 2135, 73), 'mock.patch', 'mock.patch', ({(2135, 16, 2135, 72): '"""octavia.db.repositories.HealthMonitorRepository.update"""'}, {}), "('octavia.db.repositories.HealthMonitorRepository.update')", False, 'import mock\n'), ((2175, 5, 2175, 68), 'mock.patch', 'mock.patch', ({(2175, 16, 2175, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((2214, 5, 2214, 68), 'mock.patch', 'mock.patch', ({(2214, 16, 2214, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), 
"('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((2253, 5, 2253, 68), 'mock.patch', 'mock.patch', ({(2253, 16, 2253, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((2292, 5, 2292, 68), 'mock.patch', 'mock.patch', ({(2292, 16, 2292, 67): '"""octavia.db.repositories.L7PolicyRepository.update"""'}, {}), "('octavia.db.repositories.L7PolicyRepository.update')", False, 'import mock\n'), ((2331, 5, 2331, 66), 'mock.patch', 'mock.patch', ({(2331, 16, 2331, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((2370, 5, 2370, 66), 'mock.patch', 'mock.patch', ({(2370, 16, 2370, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((2409, 5, 2409, 66), 'mock.patch', 'mock.patch', ({(2409, 16, 2409, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((2448, 5, 2448, 66), 'mock.patch', 'mock.patch', ({(2448, 16, 2448, 65): '"""octavia.db.repositories.L7RuleRepository.update"""'}, {}), "('octavia.db.repositories.L7RuleRepository.update')", False, 'import mock\n'), ((2487, 5, 2487, 66), 'mock.patch', 'mock.patch', ({(2487, 16, 2487, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((2525, 5, 2525, 66), 'mock.patch', 'mock.patch', ({(2525, 16, 2525, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((2564, 5, 2564, 66), 'mock.patch', 'mock.patch', ({(2564, 16, 2564, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((2603, 5, 2603, 66), 'mock.patch', 'mock.patch', ({(2603, 16, 2603, 65): '"""octavia.db.repositories.MemberRepository.update"""'}, {}), "('octavia.db.repositories.MemberRepository.update')", False, 'import mock\n'), ((2642, 5, 2642, 64), 'mock.patch', 'mock.patch', ({(2642, 16, 2642, 63): '"""octavia.db.repositories.PoolRepository.update"""'}, {}), "('octavia.db.repositories.PoolRepository.update')", False, 'import mock\n'), ((2680, 5, 2680, 64), 'mock.patch', 'mock.patch', ({(2680, 16, 2680, 63): '"""octavia.db.repositories.PoolRepository.update"""'}, {}), "('octavia.db.repositories.PoolRepository.update')", False, 'import mock\n'), ((2718, 5, 2718, 64), 'mock.patch', 'mock.patch', ({(2718, 16, 2718, 63): '"""octavia.db.repositories.PoolRepository.update"""'}, {}), "('octavia.db.repositories.PoolRepository.update')", False, 'import mock\n'), ((2756, 5, 2756, 64), 'mock.patch', 'mock.patch', ({(2756, 16, 2756, 63): '"""octavia.db.repositories.PoolRepository.update"""'}, {}), "('octavia.db.repositories.PoolRepository.update')", False, 'import mock\n'), ((2795, 5, 2795, 79), 'mock.patch', 'mock.patch', ({(2795, 16, 2795, 78): '"""octavia.db.repositories.MemberRepository.update_pool_members"""'}, {}), "('octavia.db.repositories.MemberRepository.update_pool_members')", False, 'import mock\n'), ((107, 31, 107, 47), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((111, 29, 111, 45), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), 
((114, 33, 114, 49), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((117, 27, 117, 43), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((120, 28, 120, 44), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((129, 29, 129, 45), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((132, 27, 132, 43), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((150, 27, 150, 61), 'octavia.controller.worker.v2.tasks.database_tasks.CreateAmphoraInDB', 'database_tasks.CreateAmphoraInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((153, 8, 157, 28), 'octavia.db.repositories.AmphoraRepository.create.assert_called_once_with', 'repo.AmphoraRepository.create.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((192, 26, 192, 61), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteListenerInDB', 'database_tasks.DeleteListenerInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((195, 8, 197, 27), 'octavia.db.repositories.ListenerRepository.delete.assert_called_once_with', 'repo.ListenerRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((200, 8, 200, 51), 'octavia.db.repositories.ListenerRepository.delete.reset_mock', 'repo.ListenerRepository.delete.reset_mock', ({}, {}), '()', True, 'from octavia.db import repositories as repo\n'), ((202, 8, 202, 58), 'octavia.db.repositories.ListenerRepository.delete.assert_not_called', 'repo.ListenerRepository.delete.assert_not_called', ({}, {}), '()', True, 'from octavia.db import repositories as repo\n'), ((217, 28, 217, 68), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteHealthMonitorInDB', 'database_tasks.DeleteHealthMonitorInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((220, 8, 221, 29), 'octavia.db.repositories.HealthMonitorRepository.delete.assert_called_once_with', 'repo.HealthMonitorRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((227, 8, 228, 66), 'octavia.db.repositories.HealthMonitorRepository.update.assert_called_once_with', 'repo.HealthMonitorRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((235, 8, 236, 29), 'octavia.db.repositories.HealthMonitorRepository.delete.assert_called_once_with', 'repo.HealthMonitorRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((251, 28, 251, 74), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteHealthMonitorInDBByPool', 'database_tasks.DeleteHealthMonitorInDBByPool', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((254, 8, 256, 21), 'octavia.db.repositories.HealthMonitorRepository.delete.assert_called_once_with', 'repo.HealthMonitorRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((262, 8, 263, 66), 'octavia.db.repositories.HealthMonitorRepository.update.assert_called_once_with', 'repo.HealthMonitorRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((282, 24, 282, 57), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteMemberInDB', 'database_tasks.DeleteMemberInDB', ({}, {}), '()', False, 
'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((285, 8, 287, 25), 'octavia.db.repositories.MemberRepository.delete.assert_called_once_with', 'repo.MemberRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((310, 22, 310, 53), 'octavia.controller.worker.v2.tasks.database_tasks.DeletePoolInDB', 'database_tasks.DeletePoolInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((313, 8, 315, 23), 'octavia.db.repositories.PoolRepository.delete.assert_called_once_with', 'repo.PoolRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((339, 26, 339, 61), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteL7PolicyInDB', 'database_tasks.DeleteL7PolicyInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((342, 8, 344, 27), 'octavia.db.repositories.L7PolicyRepository.delete.assert_called_once_with', 'repo.L7PolicyRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((368, 24, 368, 57), 'octavia.controller.worker.v2.tasks.database_tasks.DeleteL7RuleInDB', 'database_tasks.DeleteL7RuleInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((371, 8, 373, 25), 'octavia.db.repositories.L7RuleRepository.delete.assert_called_once_with', 'repo.L7RuleRepository.delete.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((398, 21, 398, 51), 'octavia.controller.worker.v2.tasks.database_tasks.ReloadAmphora', 'database_tasks.ReloadAmphora', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((401, 8, 403, 22), 'octavia.db.repositories.AmphoraRepository.get.assert_called_once_with', 'repo.AmphoraRepository.get.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((419, 20, 419, 55), 'octavia.controller.worker.v2.tasks.database_tasks.ReloadLoadBalancer', 'database_tasks.ReloadLoadBalancer', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((422, 8, 424, 21), 'octavia.db.repositories.LoadBalancerRepository.get.assert_called_once_with', 'repo.LoadBalancerRepository.get.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((442, 21, 442, 62), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateVIPAfterAllocation', 'database_tasks.UpdateVIPAfterAllocation', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((463, 30, 463, 68), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmphoraeVIPData', 'database_tasks.UpdateAmphoraeVIPData', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((483, 31, 483, 68), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmphoraVIPData', 'database_tasks.UpdateAmphoraVIPData', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((504, 32, 504, 73), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmpFailoverDetails', 'database_tasks.UpdateAmpFailoverDetails', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((528, 29, 528, 78), 'octavia.controller.worker.v2.tasks.database_tasks.AssociateFailoverAmphoraWithLBID', 'database_tasks.AssociateFailoverAmphoraWithLBID', ({}, {}), '()', False, 'from 
octavia.controller.worker.v2.tasks import database_tasks\n'), ((565, 24, 565, 65), 'octavia.controller.worker.v2.tasks.database_tasks.MapLoadbalancerToAmphora', 'database_tasks.MapLoadbalancerToAmphora', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((568, 8, 571, 17), 'octavia.db.repositories.AmphoraRepository.allocate_and_associate.assert_called_once_with', 'repo.AmphoraRepository.allocate_and_associate.assert_called_once_with', ({(569, 12, 569, 18): '"""TEST"""', (570, 12, 570, 17): 'LB_ID', (571, 12, 571, 16): 'None'}, {}), "('TEST',\n LB_ID, None)", True, 'from octavia.db import repositories as repo\n'), ((581, 8, 584, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((587, 8, 587, 55), 'octavia.db.repositories.LoadBalancerRepository.update.reset_mock', 'repo.LoadBalancerRepository.update.reset_mock', ({}, {}), '()', True, 'from octavia.db import repositories as repo\n'), ((590, 8, 593, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((608, 24, 608, 65), 'octavia.controller.worker.v2.tasks.database_tasks.MapLoadbalancerToAmphora', 'database_tasks.MapLoadbalancerToAmphora', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((613, 8, 616, 21), 'octavia.db.repositories.AmphoraRepository.allocate_and_associate.assert_called_once_with', 'repo.AmphoraRepository.allocate_and_associate.assert_called_once_with', ({(614, 12, 614, 18): '"""TEST"""', (615, 12, 615, 17): 'LB_ID', (616, 12, 616, 20): '"""fakeaz"""'}, {}), "('TEST',\n LB_ID, 'fakeaz')", True, 'from octavia.db import repositories as repo\n'), ((626, 8, 629, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((632, 8, 632, 55), 'octavia.db.repositories.LoadBalancerRepository.update.reset_mock', 'repo.LoadBalancerRepository.update.reset_mock', ({}, {}), '()', True, 'from octavia.db import repositories as repo\n'), ((635, 8, 638, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((655, 34, 656, 61), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBAmphoraeDeletedInDB', 'database_tasks.MarkLBAmphoraeDeletedInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((659, 8, 662, 37), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((679, 36, 680, 62), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraAllocatedInDB', 'database_tasks.MarkAmphoraAllocatedInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((684, 8, 690, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((698, 8, 701, 35), 
'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((710, 8, 713, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((724, 33, 724, 72), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraBootingInDB', 'database_tasks.MarkAmphoraBootingInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((728, 8, 732, 34), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((740, 8, 744, 34), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((753, 8, 757, 34), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((768, 33, 768, 72), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraDeletedInDB', 'database_tasks.MarkAmphoraDeletedInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((771, 8, 774, 37), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((780, 8, 783, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((790, 8, 793, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((804, 41, 805, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraPendingDeleteInDB', 'database_tasks.MarkAmphoraPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((808, 8, 811, 44), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((817, 8, 820, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((828, 8, 831, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((842, 41, 843, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraPendingUpdateInDB', 'database_tasks.MarkAmphoraPendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((846, 8, 849, 44), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((855, 8, 858, 35), 
'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((865, 8, 868, 35), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((881, 31, 881, 68), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraReadyInDB', 'database_tasks.MarkAmphoraReadyInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((884, 8, 889, 36), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((896, 8, 901, 36), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((909, 8, 914, 36), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((927, 30, 927, 64), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmphoraInfo', 'database_tasks.UpdateAmphoraInfo', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((930, 8, 936, 42), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((938, 8, 940, 22), 'octavia.db.repositories.AmphoraRepository.get.assert_called_once_with', 'repo.AmphoraRepository.get.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((951, 32, 951, 72), 'octavia.controller.worker.v2.tasks.database_tasks.MarkListenerDeletedInDB', 'database_tasks.MarkListenerDeletedInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((954, 8, 957, 50), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((963, 8, 966, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((973, 8, 976, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((987, 40, 988, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkListenerPendingDeleteInDB', 'database_tasks.MarkListenerPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((991, 8, 994, 57), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1000, 8, 1003, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1010, 8, 1013, 48), 
'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1029, 40, 1030, 70), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBAndListenersActiveInDB', 'database_tasks.MarkLBAndListenersActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1034, 8, 1037, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1045, 40, 1046, 70), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBAndListenersActiveInDB', 'database_tasks.MarkLBAndListenersActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1050, 8, 1053, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1066, 8, 1069, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1070, 8, 1073, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1081, 8, 1084, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1085, 8, 1088, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1107, 8, 1110, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1111, 8, 1114, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1128, 26, 1128, 72), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmphoraDBCertExpiration', 'database_tasks.UpdateAmphoraDBCertExpiration', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1129, 14, 1129, 68), 'octavia.common.utils.get_six_compatible_server_certs_key_passphrase', 'utils.get_six_compatible_server_certs_key_passphrase', ({}, {}), '()', False, 'from octavia.common import utils\n'), ((1130, 14, 1130, 32), 'cryptography.fernet.Fernet', 'fernet.Fernet', ({(1130, 28, 1130, 31): 'key'}, {}), '(key)', False, 'from cryptography import fernet\n'), ((1136, 8, 1139, 39), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1149, 29, 1149, 74), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateAmphoraCertBusyToFalse', 'database_tasks.UpdateAmphoraCertBusyToFalse', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import 
database_tasks\n'), ((1151, 8, 1154, 28), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1165, 35, 1165, 68), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBActiveInDB', 'database_tasks.MarkLBActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1168, 8, 1171, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1178, 8, 1181, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1189, 8, 1192, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1205, 35, 1205, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBActiveInDBByListener', 'database_tasks.MarkLBActiveInDBByListener', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1208, 8, 1211, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1218, 8, 1221, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1229, 8, 1232, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1245, 13, 1245, 68), 'octavia.common.data_models.LoadBalancer', 'data_models.LoadBalancer', (), '', False, 'from octavia.common import data_models\n'), ((1246, 25, 1246, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBActiveInDB', 'database_tasks.MarkLBActiveInDB', (), '', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1249, 8, 1252, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1264, 8, 1267, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1293, 22, 1293, 56), 'octavia.common.data_models.Pool', 'data_models.Pool', (), '', False, 'from octavia.common import data_models\n'), ((1296, 25, 1296, 60), 'octavia.common.data_models.HealthMonitor', 'data_models.HealthMonitor', (), '', False, 'from octavia.common import data_models\n'), ((1297, 23, 1299, 70), 'octavia.common.data_models.Pool', 'data_models.Pool', (), '', False, 'from octavia.common import data_models\n'), ((1300, 20, 1301, 67), 'octavia.common.data_models.Listener', 'data_models.Listener', (), '', False, 'from octavia.common import data_models\n'), ((1304, 24, 1305, 58), 'octavia.common.data_models.Pool', 'data_models.Pool', (), 
'', False, 'from octavia.common import data_models\n'), ((1307, 26, 1309, 63), 'octavia.common.data_models.L7Policy', 'data_models.L7Policy', (), '', False, 'from octavia.common import data_models\n'), ((1311, 20, 1312, 63), 'octavia.common.data_models.Listener', 'data_models.Listener', (), '', False, 'from octavia.common import data_models\n'), ((1317, 13, 1318, 50), 'octavia.common.data_models.LoadBalancer', 'data_models.LoadBalancer', (), '', False, 'from octavia.common import data_models\n'), ((1319, 25, 1319, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBActiveInDB', 'database_tasks.MarkLBActiveInDB', (), '', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1322, 8, 1325, 49), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1360, 8, 1363, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1398, 36, 1398, 70), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBDeletedInDB', 'database_tasks.MarkLBDeletedInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1401, 8, 1404, 50), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1410, 8, 1413, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1420, 8, 1423, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1434, 44, 1435, 69), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBPendingDeleteInDB', 'database_tasks.MarkLBPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1438, 8, 1441, 57), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1447, 8, 1450, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1457, 8, 1460, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1473, 28, 1473, 64), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateHealthMonInDB', 'database_tasks.UpdateHealthMonInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1477, 8, 1480, 31), 'octavia.db.repositories.HealthMonitorRepository.update.assert_called_once_with', 'repo.HealthMonitorRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1486, 8, 1489, 48), 
'octavia.db.repositories.HealthMonitorRepository.update.assert_called_once_with', 'repo.HealthMonitorRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1496, 8, 1499, 48), 'octavia.db.repositories.HealthMonitorRepository.update.assert_called_once_with', 'repo.HealthMonitorRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1510, 31, 1510, 70), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateLoadbalancerInDB', 'database_tasks.UpdateLoadbalancerInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1514, 8, 1517, 45), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1523, 8, 1526, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1533, 8, 1536, 48), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1550, 31, 1550, 70), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateLoadbalancerInDB', 'database_tasks.UpdateLoadbalancerInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1556, 8, 1559, 45), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1561, 8, 1562, 79), 'octavia.db.repositories.VipRepository.update.assert_called_once_with', 'repo.VipRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1573, 26, 1573, 61), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateListenerInDB', 'database_tasks.UpdateListenerInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1578, 8, 1581, 45), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1586, 8, 1589, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1595, 8, 1598, 48), 'octavia.db.repositories.ListenerRepository.update.assert_called_once_with', 'repo.ListenerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1611, 24, 1611, 57), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateMemberInDB', 'database_tasks.UpdateMemberInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1615, 8, 1618, 44), 'octavia.db.repositories.MemberRepository.update.assert_called_once_with', 'repo.MemberRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1624, 8, 1627, 48), 'octavia.db.repositories.MemberRepository.update.assert_called_once_with', 'repo.MemberRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories 
as repo\n'), ((1634, 8, 1637, 48), 'octavia.db.repositories.MemberRepository.update.assert_called_once_with', 'repo.MemberRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1654, 22, 1654, 53), 'octavia.controller.worker.v2.tasks.database_tasks.UpdatePoolInDB', 'database_tasks.UpdatePoolInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1658, 8, 1661, 24), 'octavia.db.repositories.Repositories.update_pool_and_sp.assert_called_once_with', 'repo.Repositories.update_pool_and_sp.assert_called_once_with', ({(1659, 12, 1659, 18): '"""TEST"""', (1660, 12, 1660, 19): 'POOL_ID', (1661, 12, 1661, 23): 'update_dict'}, {}), "('TEST',\n POOL_ID, update_dict)", True, 'from octavia.db import repositories as repo\n'), ((1667, 8, 1670, 53), 'octavia.db.repositories.Repositories.update_pool_and_sp.assert_called_once_with', 'repo.Repositories.update_pool_and_sp.assert_called_once_with', ({(1668, 12, 1668, 18): '"""TEST"""', (1669, 12, 1669, 19): 'POOL_ID', (1670, 12, 1670, 52): "{'provisioning_status': constants.ERROR}"}, {}), "('TEST',\n POOL_ID, {'provisioning_status': constants.ERROR})", True, 'from octavia.db import repositories as repo\n'), ((1677, 8, 1680, 53), 'octavia.db.repositories.Repositories.update_pool_and_sp.assert_called_once_with', 'repo.Repositories.update_pool_and_sp.assert_called_once_with', ({(1678, 12, 1678, 18): '"""TEST"""', (1679, 12, 1679, 19): 'POOL_ID', (1680, 12, 1680, 52): "{'provisioning_status': constants.ERROR}"}, {}), "('TEST',\n POOL_ID, {'provisioning_status': constants.ERROR})", True, 'from octavia.db import repositories as repo\n'), ((1693, 26, 1693, 61), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateL7PolicyInDB', 'database_tasks.UpdateL7PolicyInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1697, 8, 1700, 52), 'octavia.db.repositories.L7PolicyRepository.update.assert_called_once_with', 'repo.L7PolicyRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1706, 8, 1709, 48), 'octavia.db.repositories.L7PolicyRepository.update.assert_called_once_with', 'repo.L7PolicyRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1716, 8, 1719, 48), 'octavia.db.repositories.L7PolicyRepository.update.assert_called_once_with', 'repo.L7PolicyRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1734, 24, 1734, 57), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateL7RuleInDB', 'database_tasks.UpdateL7RuleInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1741, 8, 1746, 25), 'octavia.db.repositories.L7RuleRepository.update.assert_called_once_with', 'repo.L7RuleRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1752, 8, 1755, 48), 'octavia.db.repositories.L7PolicyRepository.update.assert_called_once_with', 'repo.L7PolicyRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1762, 8, 1765, 48), 'octavia.db.repositories.L7PolicyRepository.update.assert_called_once_with', 'repo.L7PolicyRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1776, 26, 1776, 60), 'octavia.controller.worker.v2.tasks.database_tasks.GetAmphoraDetails', 
'database_tasks.GetAmphoraDetails', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1796, 31, 1796, 69), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraMasterInDB', 'database_tasks.MarkAmphoraMasterInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1798, 8, 1800, 57), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1805, 8, 1806, 58), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1816, 31, 1816, 69), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraBackupInDB', 'database_tasks.MarkAmphoraBackupInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1818, 8, 1820, 57), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1825, 8, 1826, 58), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1830, 35, 1830, 77), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraStandAloneInDB', 'database_tasks.MarkAmphoraStandAloneInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1832, 8, 1834, 31), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1839, 8, 1840, 58), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1846, 8, 1847, 58), 'octavia.db.repositories.AmphoraRepository.update.assert_called_once_with', 'repo.AmphoraRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((1859, 15, 1859, 31), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((1860, 18, 1860, 43), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((1861, 15, 1861, 31), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((1862, 18, 1862, 43), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ({}, {}), '()', False, 'from oslo_utils import uuidutils\n'), ((1863, 13, 1863, 29), 'mock.MagicMock', 'mock.MagicMock', ({}, {}), '()', False, 'import mock\n'), ((1868, 31, 1868, 75), 'octavia.controller.worker.v2.tasks.database_tasks.GetAmphoraeFromLoadbalancer', 'database_tasks.GetAmphoraeFromLoadbalancer', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1884, 31, 1884, 76), 'octavia.controller.worker.v2.tasks.database_tasks.GetListenersFromLoadbalancer', 'database_tasks.GetListenersFromLoadbalancer', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1898, 30, 1898, 69), 'octavia.controller.worker.v2.tasks.database_tasks.GetVipFromLoadbalancer', 
'database_tasks.GetVipFromLoadbalancer', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1915, 28, 1915, 65), 'octavia.controller.worker.v2.tasks.database_tasks.CreateVRRPGroupForLB', 'database_tasks.CreateVRRPGroupForLB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1936, 29, 1936, 76), 'octavia.controller.worker.v2.tasks.database_tasks.DisableAmphoraHealthMonitoring', 'database_tasks.DisableAmphoraHealthMonitoring', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1953, 12, 1953, 62), 'octavia.controller.worker.v2.tasks.database_tasks.DisableLBAmphoraeHealthMonitoring', 'database_tasks.DisableLBAmphoraeHealthMonitoring', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1968, 20, 1968, 58), 'octavia.controller.worker.v2.tasks.database_tasks.MarkAmphoraHealthBusy', 'database_tasks.MarkAmphoraHealthBusy', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1985, 12, 1985, 53), 'octavia.controller.worker.v2.tasks.database_tasks.MarkLBAmphoraeHealthBusy', 'database_tasks.MarkLBAmphoraeHealthBusy', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((1999, 35, 1999, 75), 'octavia.controller.worker.v2.tasks.database_tasks.UpdateLBServerGroupInDB', 'database_tasks.UpdateLBServerGroupInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2002, 8, 2005, 44), 'octavia.db.repositories.LoadBalancerRepository.update.assert_called_once_with', 'repo.LoadBalancerRepository.update.assert_called_once_with', (), '', True, 'from octavia.db import repositories as repo\n'), ((2027, 34, 2027, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkHealthMonitorActiveInDB', 'database_tasks.MarkHealthMonitorActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2067, 42, 2068, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkHealthMonitorPendingCreateInDB', 'database_tasks.MarkHealthMonitorPendingCreateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2107, 42, 2108, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkHealthMonitorPendingDeleteInDB', 'database_tasks.MarkHealthMonitorPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2147, 42, 2148, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkHealthMonitorPendingUpdateInDB', 'database_tasks.MarkHealthMonitorPendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2186, 32, 2186, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7PolicyActiveInDB', 'database_tasks.MarkL7PolicyActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2225, 40, 2226, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7PolicyPendingCreateInDB', 'database_tasks.MarkL7PolicyPendingCreateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2264, 40, 2265, 71), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7PolicyPendingDeleteInDB', 'database_tasks.MarkL7PolicyPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2303, 40, 2304, 71), 
'octavia.controller.worker.v2.tasks.database_tasks.MarkL7PolicyPendingUpdateInDB', 'database_tasks.MarkL7PolicyPendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2342, 30, 2342, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7RuleActiveInDB', 'database_tasks.MarkL7RuleActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2381, 38, 2382, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7RulePendingCreateInDB', 'database_tasks.MarkL7RulePendingCreateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2420, 38, 2421, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7RulePendingDeleteInDB', 'database_tasks.MarkL7RulePendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2459, 38, 2460, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkL7RulePendingUpdateInDB', 'database_tasks.MarkL7RulePendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2498, 30, 2498, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkMemberActiveInDB', 'database_tasks.MarkMemberActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2536, 38, 2537, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkMemberPendingCreateInDB', 'database_tasks.MarkMemberPendingCreateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2575, 38, 2576, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkMemberPendingDeleteInDB', 'database_tasks.MarkMemberPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2614, 38, 2615, 67), 'octavia.controller.worker.v2.tasks.database_tasks.MarkMemberPendingUpdateInDB', 'database_tasks.MarkMemberPendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2653, 28, 2653, 63), 'octavia.controller.worker.v2.tasks.database_tasks.MarkPoolActiveInDB', 'database_tasks.MarkPoolActiveInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2691, 36, 2691, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkPoolPendingCreateInDB', 'database_tasks.MarkPoolPendingCreateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2729, 36, 2729, 78), 'octavia.controller.worker.v2.tasks.database_tasks.MarkPoolPendingDeleteInDB', 'database_tasks.MarkPoolPendingDeleteInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2767, 36, 2768, 63), 'octavia.controller.worker.v2.tasks.database_tasks.MarkPoolPendingUpdateInDB', 'database_tasks.MarkPoolPendingUpdateInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((2807, 25, 2807, 78), 'octavia.controller.worker.v2.tasks.database_tasks.UpdatePoolMembersOperatingStatusInDB', 'database_tasks.UpdatePoolMembersOperatingStatusInDB', ({}, {}), '()', False, 'from octavia.controller.worker.v2.tasks import database_tasks\n'), ((232, 51, 232, 70), 'sqlalchemy.orm.exc.NoResultFound', 'exc.NoResultFound', ({}, {}), '()', False, 'from sqlalchemy.orm import exc\n'), ((1132, 12, 1132, 55), 'octavia.common.utils.get_six_compatible_value', 
'utils.get_six_compatible_value', ({(1132, 43, 1132, 54): '"""test_cert"""'}, {}), "('test_cert')", False, 'from octavia.common import utils\n'), ((1243, 21, 1243, 57), 'octavia.common.data_models.Listener', 'data_models.Listener', (), '', False, 'from octavia.common import data_models\n'), ((1244, 21, 1244, 57), 'octavia.common.data_models.Listener', 'data_models.Listener', (), '', False, 'from octavia.common import data_models\n'), ((1306, 19, 1306, 49), 'octavia.common.data_models.L7Rule', 'data_models.L7Rule', (), '', False, 'from octavia.common import data_models\n'), ((1255, 13, 1256, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1257, 13, 1258, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1270, 13, 1271, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1272, 13, 1273, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1328, 13, 1329, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1330, 13, 1331, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1334, 13, 1335, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1336, 13, 1337, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1340, 13, 1341, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1344, 13, 1345, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1348, 13, 1349, 60), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1366, 13, 1367, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1368, 13, 1369, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1372, 13, 1373, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1374, 13, 1375, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1378, 13, 1379, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1382, 13, 1383, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n'), ((1386, 13, 1387, 59), 'mock.call', 'mock.call', (), '', False, 'import mock\n')]
Jay4C/Web-Scraping
Yellow_Pages_Lithuania/unit_tests.py
187679bee035dad661d983b5a8382240f820c337
import time

from bs4 import BeautifulSoup
import requests
import pymysql.cursors
import unittest


class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase):
    def test_extract_one_email(self):
        url = "https://www.visalietuva.lt/en/company/astorija-hotel-uab"

        # Request the content of a page from the url
        html = requests.get(url)
        # Parse the content of html_doc
        soup = BeautifulSoup(html.content, 'html.parser')

        if soup.find('a', {'itemprop': 'email'}) is not None:
            email = "info@" + soup.find('a', {'itemprop': 'email'}).text.split("@")[1]
            print('email : ' + email)
        else:
            print('no email business')

    def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self):
        activity = "hotel"
        city = "vilniuje"
        url_search = "https://www.visalietuva.lt/en/search/" + activity + "/" + city
        html_search = requests.get(url_search)
        soup_search = BeautifulSoup(html_search.content, 'html.parser')

        number_of_pages = 0
        if soup_search.find('div', {'class': 'search_count f_left'}) is not None:
            number_of_pages_with_coma = int(soup_search
                                            .find('div', {'class': 'search_count f_left'})
                                            .find('span').text
                                            )/20

            if int(str(number_of_pages_with_coma).split(".")[1][:1]) < 5:
                number_of_pages += round(number_of_pages_with_coma) + 1
                print('number_of_pages : ' + str(number_of_pages))
            elif int(str(number_of_pages_with_coma).split(".")[1][:1]) >= 5:
                number_of_pages += round(number_of_pages_with_coma)
                print('number_of_pages : ' + str(number_of_pages))

        i_1 = 0
        if soup_search.find('div', {'class': 'company_list'}) is not None:
            print(url_search)
            for result_item in soup_search \
                    .find('div', {'class': 'company_list'}) \
                    .find_all('div', {'class': 'item'}):
                i_1 += 1
                url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                time.sleep(2)

                # Request the content of a page from the url
                html_result = requests.get(url_result)
                # Parse the content of html_doc
                soup_result = BeautifulSoup(html_result.content, 'html.parser')

                if soup_result.find('a', {'itemprop': 'email'}) is not None:
                    email = "info@" + soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                    print(str(i_1) + ' email : ' + email)
                else:
                    print(str(i_1) + ' no email business')
        else:
            print('sorry there is nothing')

        if number_of_pages > 1:
            for i in range(2, number_of_pages+1):
                url_of_one_page_of_results = url_search + "/" + str(i)
                print(url_of_one_page_of_results)
                time.sleep(2)
                html_of_one_page_of_results = requests.get(url_of_one_page_of_results)
                soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser')

                if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:
                    for result_item in soup_of_one_page_of_results\
                            .find('div', {'class': 'company_list'})\
                            .find_all('div', {'class': 'item'}):
                        i_1 += 1
                        url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')

                        # Request the content of a page from the url
                        html_result = requests.get(url_result)
                        # Parse the content of html_doc
                        soup_result = BeautifulSoup(html_result.content, 'html.parser')

                        if soup_result.find('a', {'itemprop': 'email'}) is not None:
                            email = "info@" + soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                            print(str(i_1) + ' email : ' + email)
                        else:
                            print(str(i_1) + ' no email business')
                else:
                    print('sorry there is nothing')

    def test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals(self):
        activites = [
            # {'id': '1', 'url': 'labour'}
            #{'id': '2', 'url': 'real+estate'},
            #{'id': '3', 'url': 'recruitment'},
            #{'id': '4', 'url': 'software'},
            #{'id': '5', 'url': 'hotel'},
            #{'id': '6', 'url': 'landlord'},
            #{'id': '7', 'url': 'cleaning'},
            #{'id': '8', 'url': 'association'},
            #{'id': '9', 'url': 'financial'},
            #{'id': '10', 'url': 'restaurant'},
            #{'id': '11', 'url': 'building'},
            #{'id': '12', 'url': 'hairdresser'},
            #{'id': '13', 'url': 'florist'},
            #{'id': '14', 'url': 'locksmith'},
            #{'id': '15', 'url': 'bakery'},
            #{'id': '16', 'url': 'insurance'},
            #{'id': '17', 'url': 'pharmacy'},
            #{'id': '18', 'url': 'moving'},
            #{'id': '19', 'url': 'electricity'},
            #{'id': '20', 'url': 'plumbing'},
            #{'id': '21', 'url': 'security'},
            #{'id': '22', 'url': 'lawyer'},
            #{'id': '23', 'url': 'bank'},
            #{'id': '24', 'url': 'garage'},
            #{'id': '25', 'url': 'dentist'},
            #{'id': '26', 'url': 'doctor'},
            #{'id': '27', 'url': 'accounting'},
            #{'id': '28', 'url': 'store'},
            #{'id': '29', 'url': 'notary'},
            #{'id': '30', 'url': 'jeweller'},
            #{'id': '31', 'url': 'tailor'},
            #{'id': '32', 'url': 'meat'},
            #{'id': '33', 'url': 'library'},
            #{'id': '34', 'url': 'architect'}
        ]

        capitales_du_monde = [
            {'id': '183', 'nom': 'akmeneje'},#Akmenė
            {'id': '184', 'nom': 'alytuje'},#Alytus
            {'id': '185', 'nom': 'anyksciuose'},#Anykščiai
            {'id': '186', 'nom': 'birstone'},#Birštonas
            {'id': '187', 'nom': 'birzuose'},#Biržai
            {'id': '188', 'nom': 'druskininkuose'},#Druskininkai
            {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai
            {'id': '190', 'nom': 'ignalinoje'},#Ignalina
            {'id': '191', 'nom': 'jonavoje'},#Jonava
            {'id': '192', 'nom': 'joniskyje'},#Joniškis
            {'id': '193', 'nom': 'jurbarke'},#Jurbarkas
            {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys
            {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija
            {'id': '196', 'nom': 'kaune'},#Kaunas
            {'id': '197', 'nom': 'kazlu-rudoje'},#Kazlų Rūda
            {'id': '198', 'nom': 'kedainiuose'},#Kėdainiai
            {'id': '199', 'nom': 'kelmeje'},#Kelmė
            {'id': '200', 'nom': 'klaipedoje'},#Klaipėda
            {'id': '201', 'nom': 'kretingoje'},#Kretinga
            {'id': '202', 'nom': 'kupiskyje'},#Kupiškis
            {'id': '203', 'nom': 'lazdijuose'},#Lazdijai
            {'id': '204', 'nom': 'marijampoleje'},#Marijampolė
            {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai
            {'id': '206', 'nom': 'moletuose'},#Molėtai
            {'id': '207', 'nom': 'neringoje'},#Neringa
            {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai
            {'id': '209', 'nom': 'pakruojyje'},#Pakruojis
            {'id': '210', 'nom': 'palangoje'},#Palanga
            {'id': '211', 'nom': 'panevezyje'},#Panevėžys
            {'id': '212', 'nom': 'pasvalyje'},#Pasvalys
            {'id': '213', 'nom': 'plungeje'},#Plungė
            {'id': '214', 'nom': 'prienuose'},#Prienai
            {'id': '215', 'nom': 'radviliskyje'},#Radviliškis
            {'id': '216', 'nom': 'raseiniuose'},#Raseiniai
            {'id': '217', 'nom': 'rietave'},#Rietavas
            {'id': '218', 'nom': 'rokiskyje'},#Rokiškis
            {'id': '219', 'nom': 'sakiuose'},#Šakiai
            {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai
            {'id': '221', 'nom': 'siauliuose'},#Šiauliai
            {'id': '222', 'nom': 'silaleje'},#Šilalė
            {'id': '223', 'nom': 'siluteje'},#Šilutė
            {'id': '224', 'nom': 'sirvintose'},#Širvintos
            {'id': '225', 'nom': 'skuode'},#Skuodas
            {'id': '226', 'nom': 'svencionyse'},#Švenčionys
            {'id': '227', 'nom': 'taurageje'},#Tauragė
            {'id': '228', 'nom': 'telsiuose'},#Telšiai
            {'id': '229', 'nom': 'trakuose'},#Trakai
            {'id': '230', 'nom': 'ukmergeje'},#Ukmergė
            {'id': '231', 'nom': 'utenoje'},#Utena
            {'id': '232', 'nom': 'varenoje'},#Varėna
            {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis
            {'id': '234', 'nom': 'vilniuje'},#Vilnius
            {'id': '235', 'nom': 'visagine'},#Visaginas
            {'id': '236', 'nom': 'zarasuose'}#Zarasai
        ]

        try:
            for capitale in capitales_du_monde:
                for activite in activites:
                    try:
                        activity = activite.get("url")
                        city = capitale.get("nom")
                        url_search = "https://www.visalietuva.lt/en/search/" + activity + "/" + city
                        html_search = requests.get(url_search)
                        soup_search = BeautifulSoup(html_search.content, 'html.parser')

                        number_of_pages = 0
                        if soup_search.find('div', {'class': 'search_count f_left'}) is not None:
                            number_of_pages_with_coma = int(soup_search
                                                            .find('div', {'class': 'search_count f_left'})
                                                            .find('span').text
                                                            ) / 20

                            if int(str(number_of_pages_with_coma).split(".")[1][:1]) < 5:
                                number_of_pages += round(number_of_pages_with_coma) + 1
                                print('number_of_pages : ' + str(number_of_pages))
                            elif int(str(number_of_pages_with_coma).split(".")[1][:1]) >= 5:
                                number_of_pages += round(number_of_pages_with_coma)
                                print('number_of_pages : ' + str(number_of_pages))

                        i_1 = 0
                        if soup_search.find('div', {'class': 'company_list'}) is not None:
                            print(url_search)
                            for result_item in soup_search \
                                    .find('div', {'class': 'company_list'}) \
                                    .find_all('div', {'class': 'item'}):
                                i_1 += 1
                                url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                                time.sleep(2)

                                # Request the content of a page from the url
                                html_result = requests.get(url_result)
                                # Parse the content of html_doc
                                soup_result = BeautifulSoup(html_result.content, 'html.parser')

                                if soup_result.find('a', {'itemprop': 'email'}) is not None:
                                    email = "info@" + soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]

                                    try:
                                        connection = pymysql.connect(
                                            host='localhost',
                                            port=3306,
                                            user='',
                                            password='',
                                            db='contacts_professionnels',
                                            charset='utf8mb4',
                                            cursorclass=pymysql.cursors.DictCursor
                                        )

                                        with connection.cursor() as cursor:
                                            try:
                                                sql = "INSERT INTO `emails` (" \
                                                      "`id_activite`, " \
                                                      "`id_capitale_du_monde`, " \
                                                      "`email`) VALUE (%s, %s, %s)"
                                                cursor.execute(sql, (
                                                    activite.get('id'),
                                                    capitale.get('id'),
                                                    email))
                                                connection.commit()
                                                print(str(i_1) + " The record is stored : " + email)
                                                connection.close()
                                            except:
                                                print(str(i_1) + " The record already exists : " + email)
                                                connection.close()
                                    except Exception as e:
                                        print(str(i_1) + " An error with the email : " + email + " " + str(e))
                                else:
                                    print(str(i_1) + ' no email business')
                        else:
                            print('sorry there is nothing')

                        if number_of_pages > 1:
                            for i in range(2, number_of_pages + 1):
                                url_of_one_page_of_results = url_search + "/" + str(i)
                                print(url_of_one_page_of_results)
                                time.sleep(2)
                                html_of_one_page_of_results = requests.get(url_of_one_page_of_results)
                                soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content,
                                                                            'html.parser')

                                if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:
                                    for result_item in soup_of_one_page_of_results \
                                            .find('div', {'class': 'company_list'}) \
                                            .find_all('div', {'class': 'item'}):
                                        i_1 += 1
                                        url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')

                                        # Request the content of a page from the url
                                        html_result = requests.get(url_result)
                                        # Parse the content of html_doc
                                        soup_result = BeautifulSoup(html_result.content, 'html.parser')

                                        if soup_result.find('a', {'itemprop': 'email'}) is not None:
                                            email = "info@" + \
                                                    soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]

                                            try:
                                                connection = pymysql.connect(
                                                    host='localhost',
                                                    port=3306,
                                                    user='',
                                                    password='',
                                                    db='contacts_professionnels',
                                                    charset='utf8mb4',
                                                    cursorclass=pymysql.cursors.DictCursor
                                                )

                                                with connection.cursor() as cursor:
                                                    try:
                                                        sql = "INSERT INTO `emails` (" \
                                                              "`id_activite`, " \
                                                              "`id_capitale_du_monde`, " \
                                                              "`email`) VALUE (%s, %s, %s)"
                                                        cursor.execute(sql, (
                                                            activite.get('id'),
                                                            capitale.get('id'),
                                                            email))
                                                        connection.commit()
                                                        print(str(i_1) + " The record is stored : " + email)
                                                        connection.close()
                                                    except:
                                                        print(str(i_1) + " The record already exists : " + email)
                                                        connection.close()
                                            except Exception as e:
                                                print(str(i_1) + " An error with the email : " + email + " " + str(e))
                                        else:
                                            print(str(i_1) + ' no email business')
                                else:
                                    print('sorry there is nothing')
                    except Exception as e:
                        print("There is an error connection at url : " + str(e))
        finally:
            print('done')


if __name__ == '__main__':
    unittest.main()
[((346, 4, 346, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((13, 15, 13, 32), 'requests.get', 'requests.get', ({(13, 28, 13, 31): 'url'}, {}), '(url)', False, 'import requests\n'), ((16, 15, 16, 57), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(16, 29, 16, 41): 'html.content', (16, 43, 16, 56): '"""html.parser"""'}, {}), "(html.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((28, 22, 28, 46), 'requests.get', 'requests.get', ({(28, 35, 28, 45): 'url_search'}, {}), '(url_search)', False, 'import requests\n'), ((29, 22, 29, 71), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(29, 36, 29, 55): 'html_search.content', (29, 57, 29, 70): '"""html.parser"""'}, {}), "(html_search.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((57, 16, 57, 29), 'time.sleep', 'time.sleep', ({(57, 27, 57, 28): '(2)'}, {}), '(2)', False, 'import time\n'), ((60, 30, 60, 54), 'requests.get', 'requests.get', ({(60, 43, 60, 53): 'url_result'}, {}), '(url_result)', False, 'import requests\n'), ((63, 30, 63, 79), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(63, 44, 63, 63): 'html_result.content', (63, 65, 63, 78): '"""html.parser"""'}, {}), "(html_result.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((77, 16, 77, 29), 'time.sleep', 'time.sleep', ({(77, 27, 77, 28): '(2)'}, {}), '(2)', False, 'import time\n'), ((78, 46, 78, 86), 'requests.get', 'requests.get', ({(78, 59, 78, 85): 'url_of_one_page_of_results'}, {}), '(url_of_one_page_of_results)', False, 'import requests\n'), ((79, 46, 79, 111), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(79, 60, 79, 95): 'html_of_one_page_of_results.content', (79, 97, 79, 110): '"""html.parser"""'}, {}), "(html_of_one_page_of_results.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((90, 38, 90, 62), 'requests.get', 'requests.get', ({(90, 51, 90, 61): 'url_result'}, {}), '(url_result)', False, 'import requests\n'), ((93, 38, 93, 87), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(93, 52, 93, 71): 'html_result.content', (93, 73, 93, 86): '"""html.parser"""'}, {}), "(html_result.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((205, 38, 205, 62), 'requests.get', 'requests.get', ({(205, 51, 205, 61): 'url_search'}, {}), '(url_search)', False, 'import requests\n'), ((206, 38, 206, 87), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(206, 52, 206, 71): 'html_search.content', (206, 73, 206, 86): '"""html.parser"""'}, {}), "(html_search.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((234, 32, 234, 45), 'time.sleep', 'time.sleep', ({(234, 43, 234, 44): '(2)'}, {}), '(2)', False, 'import time\n'), ((237, 46, 237, 70), 'requests.get', 'requests.get', ({(237, 59, 237, 69): 'url_result'}, {}), '(url_result)', False, 'import requests\n'), ((240, 46, 240, 95), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(240, 60, 240, 79): 'html_result.content', (240, 81, 240, 94): '"""html.parser"""'}, {}), "(html_result.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((283, 32, 283, 45), 'time.sleep', 'time.sleep', ({(283, 43, 283, 44): '(2)'}, {}), '(2)', False, 'import time\n'), ((284, 62, 284, 102), 'requests.get', 'requests.get', ({(284, 75, 284, 101): 'url_of_one_page_of_results'}, {}), '(url_of_one_page_of_results)', False, 'import requests\n'), ((285, 62, 286, 90), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(285, 76, 285, 111): 'html_of_one_page_of_results.content', (286, 76, 286, 89): '"""html.parser"""'}, {}), "(html_of_one_page_of_results.content, 
'html.parser')", False, 'from bs4 import BeautifulSoup\n'), ((297, 54, 297, 78), 'requests.get', 'requests.get', ({(297, 67, 297, 77): 'url_result'}, {}), '(url_result)', False, 'import requests\n'), ((300, 54, 300, 103), 'bs4.BeautifulSoup', 'BeautifulSoup', ({(300, 68, 300, 87): 'html_result.content', (300, 89, 300, 102): '"""html.parser"""'}, {}), "(html_result.content, 'html.parser')", False, 'from bs4 import BeautifulSoup\n')]
vatervonacht/dagster
python_modules/dagster/dagster_tests/compat_tests/test_back_compat.py
595d78c883ef20618052ac1575fe46cde51fd541
# pylint: disable=protected-access
import os
import re

import pytest

from dagster import file_relative_path
from dagster.core.errors import DagsterInstanceMigrationRequired
from dagster.core.instance import DagsterInstance, InstanceRef
from dagster.utils.test import restore_directory


# test that we can load runs and events from an old instance
def test_0_6_4():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_4')

    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))

        runs = instance.get_runs()
        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            for run in runs:
                instance.all_logs(run.run_id)


def test_0_6_6_sqlite_exc():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        runs = instance.get_runs()
        # Note that this is a deliberate choice -- old runs are simply invisible, and their
        # presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice since
        # the runs.db has moved and otherwise we would have to do a check for the existence of an
        # old runs.db every time we accessed the runs. Instead, we'll do this only in the upgrade
        # method.
        assert len(runs) == 0

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')


def test_0_6_6_sqlite_migrate():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
    assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))

    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        instance.upgrade()

        runs = instance.get_runs()
        assert len(runs) == 1

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')

        assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
        assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))
[((15, 15, 15, 61), 'dagster.file_relative_path', 'file_relative_path', ({(15, 34, 15, 42): '__file__', (15, 44, 15, 60): '"""snapshot_0_6_4"""'}, {}), "(__file__, 'snapshot_0_6_4')", False, 'from dagster import file_relative_path\n'), ((33, 15, 33, 68), 'dagster.file_relative_path', 'file_relative_path', ({(33, 34, 33, 42): '__file__', (33, 44, 33, 67): '"""snapshot_0_6_6/sqlite"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite')", False, 'from dagster import file_relative_path\n'), ((59, 15, 59, 68), 'dagster.file_relative_path', 'file_relative_path', ({(59, 34, 59, 42): '__file__', (59, 44, 59, 67): '"""snapshot_0_6_6/sqlite"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite')", False, 'from dagster import file_relative_path\n'), ((16, 9, 16, 36), 'dagster.utils.test.restore_directory', 'restore_directory', ({(16, 27, 16, 35): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.utils.test import restore_directory\n'), ((34, 9, 34, 36), 'dagster.utils.test.restore_directory', 'restore_directory', ({(34, 27, 34, 35): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.utils.test import restore_directory\n'), ((60, 26, 60, 87), 'dagster.file_relative_path', 'file_relative_path', ({(60, 45, 60, 53): '__file__', (60, 55, 60, 86): '"""snapshot_0_6_6/sqlite/runs.db"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite/runs.db')", False, 'from dagster import file_relative_path\n'), ((63, 9, 63, 36), 'dagster.utils.test.restore_directory', 'restore_directory', ({(63, 27, 63, 35): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.utils.test import restore_directory\n'), ((17, 44, 17, 74), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', ({(17, 65, 17, 73): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((35, 44, 35, 74), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', ({(35, 65, 35, 73): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((61, 30, 61, 99), 'dagster.file_relative_path', 'file_relative_path', ({(61, 49, 61, 57): '__file__', (61, 59, 61, 98): '"""snapshot_0_6_6/sqlite/history/runs.db"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')", False, 'from dagster import file_relative_path\n'), ((64, 44, 64, 74), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', ({(64, 65, 64, 73): 'test_dir'}, {}), '(test_dir)', False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((76, 30, 76, 99), 'dagster.file_relative_path', 'file_relative_path', ({(76, 49, 76, 57): '__file__', (76, 59, 76, 98): '"""snapshot_0_6_6/sqlite/history/runs.db"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')", False, 'from dagster import file_relative_path\n'), ((75, 34, 75, 95), 'dagster.file_relative_path', 'file_relative_path', ({(75, 53, 75, 61): '__file__', (75, 63, 75, 94): '"""snapshot_0_6_6/sqlite/runs.db"""'}, {}), "(__file__, 'snapshot_0_6_6/sqlite/runs.db')", False, 'from dagster import file_relative_path\n'), ((22, 18, 26, 13), 're.escape', 're.escape', ({(23, 16, 25, 70): '"""Instance is out of date and must be migrated (SqliteEventLogStorage for run c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`."""'}, {}), "(\n 'Instance is out of date and must be migrated (SqliteEventLogStorage for run c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is 567bc23fd1ac. 
Please run `dagster instance migrate`.'\n )", False, 'import re\n'), ((49, 18, 53, 13), 're.escape', 're.escape', ({(50, 16, 52, 70): '"""Instance is out of date and must be migrated (SqliteEventLogStorage for run 89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`."""'}, {}), "(\n 'Instance is out of date and must be migrated (SqliteEventLogStorage for run 89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`.'\n )", False, 'import re\n')]
yshrdbrn/bigdata
scripts/charts.py
51114ae98354ee094e0bcff26c1814f85c434148
import matplotlib.pyplot as plt
import pandas as pd


def group_by_category(df):
    grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes')
    labels = ['Trespassing', 'Vehicle theft', 'General Theft',
              'Damage to Property', 'Robbery', 'Homicide']
    p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Category')
    p.get_legend().remove()
    plt.savefig('../charts/category.png')


def group_by_time_of_day(df):
    grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes')
    p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Time of Day')
    p.get_legend().remove()
    plt.savefig('../charts/time_of_day.png')


def group_by_day_of_the_week(df):
    grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes')
    labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
    p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Day of The Week')
    p.get_legend().remove()
    plt.savefig('../charts/day_of_the_week.png')


def group_by_month(df):
    grouped = df.groupby(['MONTH']).size().to_frame('Size')
    grouped['Percentage'] = 100 * grouped['Size'] / len(df)
    grouped = grouped.drop(columns='Size')
    p = grouped.plot.bar()
    p.set_title('Crimes Percentage Grouped By Month')
    p.set_ylabel('Percentage of Crimes')
    p.set_xlabel('Month')
    p.get_legend().remove()
    plt.savefig('../charts/month.png')


def group_by_year(df):
    grouped = df.groupby(['YEAR']).size().to_frame('Crimes')
    p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Year')
    p.get_legend().remove()
    plt.savefig('../charts/year.png')


def group_by_territory(df):
    grouped = df.groupby(['PDQ']).size().to_frame('Size')
    grouped['Percentage'] = 100 * grouped['Size'] / len(df)
    grouped = grouped.drop(columns='Size')
    grouped.index = grouped.index.astype(int)
    p = grouped.plot.bar()
    p.set_title('Crimes Percentage Grouped By Territory')
    p.set_ylabel('Percentage of Crimes')
    p.set_xlabel('Territory Number')
    p.get_legend().remove()
    plt.savefig('../charts/territory.png')


if __name__ == '__main__':
    df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv')
    group_by_territory(df)
    group_by_year(df)
    group_by_month(df)
    group_by_time_of_day(df)
    group_by_day_of_the_week(df)
    group_by_category(df)
[((12, 4, 12, 41), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(12, 16, 12, 40): '"""../charts/category.png"""'}, {}), "('../charts/category.png')", True, 'import matplotlib.pyplot as plt\n'), ((19, 4, 19, 44), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(19, 16, 19, 43): '"""../charts/time_of_day.png"""'}, {}), "('../charts/time_of_day.png')", True, 'import matplotlib.pyplot as plt\n'), ((27, 4, 27, 48), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(27, 16, 27, 47): '"""../charts/day_of_the_week.png"""'}, {}), "('../charts/day_of_the_week.png')", True, 'import matplotlib.pyplot as plt\n'), ((38, 4, 38, 38), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(38, 16, 38, 37): '"""../charts/month.png"""'}, {}), "('../charts/month.png')", True, 'import matplotlib.pyplot as plt\n'), ((45, 4, 45, 37), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(45, 16, 45, 36): '"""../charts/year.png"""'}, {}), "('../charts/year.png')", True, 'import matplotlib.pyplot as plt\n'), ((57, 4, 57, 42), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(57, 16, 57, 41): '"""../charts/territory.png"""'}, {}), "('../charts/territory.png')", True, 'import matplotlib.pyplot as plt\n'), ((61, 9, 61, 71), 'pandas.read_csv', 'pd.read_csv', ({(61, 21, 61, 70): '"""../data/crimes_dataset_processed_incomplete.csv"""'}, {}), "('../data/crimes_dataset_processed_incomplete.csv')", True, 'import pandas as pd\n')]
benjaminkrenn/abcvoting
unittests.py
1e3833a7314d3467de7560f7e531a4c35c6eda08
# Unit tests

import unittest


def run_test_instance(unittestinstance, profile, committeesize, tests):
    import rules_approval

    # all rules used?
    for rule in rules_approval.MWRULES:
        unittestinstance.assertTrue(rule in tests.keys())

    for rule in tests.keys():
        output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False)
        unittestinstance.assertEqual(
            output, tests[rule],
            msg=rules_approval.MWRULES[rule] + " failed")
        output = rules_approval.compute_rule(
            rule, profile, committeesize, resolute=True)
        unittestinstance.assertEqual(
            len(output), 1,
            msg=rules_approval.MWRULES[rule] + " failed with resolute=True")
        unittestinstance.assertTrue(
            output[0] in tests[rule],
            msg=rules_approval.MWRULES[rule] + " failed with resolute=True")


class TestApprovalMultiwinner(unittest.TestCase):

    def test_createprofiles(self):
        from preferences import Profile
        from preferences import DichotomousPreferences
        num_cand = 7
        prof = Profile(num_cand)
        self.assertEqual(prof.add_preferences(
            DichotomousPreferences([0, 4, 5])),
            None)
        with self.assertRaises(Exception):
            prof.add_preferences(DichotomousPreferences([num_cand]))
        with self.assertRaises(Exception):
            prof.add_preferences(DichotomousPreferences([-1]))
        self.assertEqual(prof.add_preferences([0, 4, 5]), None)
        with self.assertRaises(Exception):
            prof.add_preferences([0, 4, 5, "1"])
        with self.assertRaises(Exception):
            prof.add_preferences(["1", 0, 4, 5])
        p1 = DichotomousPreferences([0, 4, 5])
        p2 = DichotomousPreferences([1, 2])
        self.assertEqual(prof.add_preferences([p1, p2]), None)
        self.assertTrue(prof.has_unit_weights())

        prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4))
        self.assertFalse(prof.has_unit_weights())
        self.assertEqual(prof.totalweight(), 6.4)

    def test_mwrules__toofewcandidates(self):
        from preferences import Profile
        import rules_approval

        profile = Profile(5)
        committeesize = 4
        preflist = [[0, 1, 2], [1], [1, 2], [0]]
        profile.add_preferences(preflist)

        for rule in rules_approval.MWRULES.keys():
            with self.assertRaises(Exception):
                rules_approval.compute_rule(rule, profile, committeesize)
            with self.assertRaises(Exception):
                rules_approval.compute_rule(rule, profile, committeesize,
                                            resolute=True)

    def test_mwrules_weightsconsidered(self):
        from preferences import Profile
        from preferences import DichotomousPreferences
        import rules_approval

        self.longMessage = True

        profile = Profile(3)
        profile.add_preferences(DichotomousPreferences([0]))
        profile.add_preferences(DichotomousPreferences([0]))
        profile.add_preferences(DichotomousPreferences([1], 5))
        profile.add_preferences(DichotomousPreferences([0]))

        committeesize = 1

        for rule in rules_approval.MWRULES.keys():
            if "monroe" in rule or "rule-x" in rule:
                # Monroe and rule x only work with unit weights:
                continue
            result = rules_approval.compute_rule(rule, profile, committeesize)
            self.assertTrue([1] in result,
                            msg=rule + " failed"+str(result))

    def test_mwrules_correct_simple(self):
        from preferences import Profile
        import rules_approval

        self.longMessage = True

        profile = Profile(4)
        profile.add_preferences([[0], [1], [2], [3]])
        committeesize = 2

        for rule in rules_approval.MWRULES.keys():
            if rule == "greedy-monroe":  # always returns one committee
                continue
            self.assertEqual(len(rules_approval.compute_rule(rule, profile,
                                                             committeesize)),
                             6, msg=rule + " failed")

        for rule in rules_approval.MWRULES.keys():
            self.assertEqual(len(rules_approval.compute_rule(rule, profile,
                                                             committeesize,
                                                             resolute=True)),
                             1, msg=rule + " failed with resolute=True")

    def test_monroe_indivisible(self):
        from preferences import Profile
        import rules_approval

        self.longMessage = True

        profile = Profile(4)
        profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]])
        committeesize = 3

        for ilp in [True, False]:
            # max Monroe score is 6 (even for committee [0, 1, 3])
            self.assertEqual(
                rules_approval.compute_monroe(profile, committeesize,
                                              ilp=ilp, resolute=False),
                [[0, 1, 2], [0, 1, 3], [0, 2, 3]])

    # this test shows that tiebreaking is not (yet)
    # implemented for opt-Phragmen
    def test_optphrag_notiebreaking(self):
        from preferences import Profile
        from rules_approval import compute_rule

        self.longMessage = True

        profile = Profile(6)
        profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4],
                                 [2, 4], [2, 5], [2, 5]])
        committeesize = 3

        self.assertEqual(
            len(compute_rule("optphrag", profile, committeesize,
                             resolute=False)),
            12)

    def test_mwrules_correct_advanced_1(self):
        from preferences import Profile

        self.longMessage = True
        committeesize = 4

        profile = Profile(6)
        preflist = [[0, 4, 5], [0], [1, 4, 5], [1], [2, 4, 5],
                    [2], [3, 4, 5], [3]]
        profile.add_preferences(preflist)

        tests1 = {
            "seqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                       [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "av": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                   [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "sav": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],
                    [0, 1, 3, 5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3, 5],
                    [0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                    [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "pav-ilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                        [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "pav-noilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                          [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "revseqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                          [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "minimaxav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                                [0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
                                [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                                [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                                [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "minimaxav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                              [0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
                              [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                              [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                              [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "phrag": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                      [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "optphrag": [[0, 1, 2, 3]],
            "cc-ilp": [[0, 1, 2, 3]],
            "cc-noilp": [[0, 1, 2, 3]],
            "seqcc": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5],
                      [0, 2, 3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "revseqcc": [[0, 1, 2, 3]],
            "monroe-ilp": [[0, 1, 2, 3]],
            "monroe-noilp": [[0, 1, 2, 3]],
            "greedy-monroe": [[0, 2, 3, 4]],
            "slav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                         [0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4],
                         [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "slav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                           [0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4],
                           [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "seqslav": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],
                        [0, 1, 3, 5], [0, 2, 3, 4], [0, 2, 3, 5],
                        [1, 2, 3, 4], [1, 2, 3, 5]],
            "rule-x": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                       [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "phragmen-enestroem": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                                   [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
        }

        run_test_instance(self, profile, committeesize, tests1)

        # and now with reversed preflist
        preflist.reverse()
        for p in preflist:
            p.reverse()
        profile = Profile(6)
        profile.add_preferences(preflist)

        run_test_instance(self, profile, committeesize, tests1)

    def test_mwrules_correct_advanced_2(self):
        from preferences import Profile

        self.longMessage = True

        # and another profile
        profile = Profile(5)
        committeesize = 3
        preflist = [[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2],
                    [0, 1, 2], [0, 1], [3, 4], [3, 4], [3]]
        profile.add_preferences(preflist)

        tests2 = {
            "seqpav": [[0, 1, 3]],
            "av": [[0, 1, 2]],
            "sav": [[0, 1, 3]],
            "pav-ilp": [[0, 1, 3]],
            "pav-noilp": [[0, 1, 3]],
            "revseqpav": [[0, 1, 3]],
            "minimaxav-noilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
            "minimaxav-ilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
            "phrag": [[0, 1, 3]],
            "optphrag": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
            "cc-ilp": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
                       [1, 2, 3], [1, 3, 4]],
            "cc-noilp": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
                         [1, 2, 3], [1, 3, 4]],
            "seqcc": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
                      [1, 2, 3], [1, 3, 4]],
            "revseqcc": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
                         [1, 2, 3], [1, 3, 4]],
            "monroe-ilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
            "monroe-noilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
            "greedy-monroe": [[0, 1, 3]],
            "seqslav": [[0, 1, 3]],
            "slav-ilp": [[0, 1, 3]],
            "slav-noilp": [[0, 1, 3]],
            "rule-x": [[0, 1, 3]],
            "phragmen-enestroem": [[0, 1, 3]],
        }

        run_test_instance(self, profile, committeesize, tests2)

    def test_mwrules_correct_advanced_3(self):
        from preferences import Profile

        self.longMessage = True

        # and a third profile
        profile = Profile(6)
        committeesize = 4
        preflist = [[0, 3, 4, 5], [1, 2], [0, 2, 5], [2],
                    [0, 1, 2, 3, 4], [0, 3, 4], [0, 2, 4], [0, 1]]
        profile.add_preferences(preflist)

        tests3 = {
            "seqpav": [[0, 1, 2, 4]],
            "av": [[0, 1, 2, 4], [0, 2, 3, 4]],
            "sav": [[0, 1, 2, 4]],
            "pav-ilp": [[0, 1, 2, 4]],
            "pav-noilp": [[0, 1, 2, 4]],
            "revseqpav": [[0, 1, 2, 4]],
            "minimaxav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3, 4],
                                [0, 2, 3, 5], [0, 2, 4, 5]],
            "minimaxav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3, 4],
                              [0, 2, 3, 5], [0, 2, 4, 5]],
            "phrag": [[0, 1, 2, 4]],
            "optphrag": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                         [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                         [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "cc-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                       [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                       [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "cc-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                         [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                         [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "seqcc": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                      [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]],
            "revseqcc": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                         [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                         [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "monroe-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                           [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                           [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "monroe-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                             [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                             [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],
            "greedy-monroe": [[0, 1, 2, 3]],
            "seqslav": [[0, 1, 2, 4]],
            "slav-ilp": [[0, 1, 2, 4]],
            "slav-noilp": [[0, 1, 2, 4]],
            "rule-x": [[0, 1, 2, 4]],
            "phragmen-enestroem": [[0, 1, 2, 4]],
        }

        run_test_instance(self, profile, committeesize, tests3)

    def test_monroescore(self):
        from preferences import Profile
        from score_functions import monroescore_flowbased, monroescore_matching

        self.longMessage = True

        # and a third profile
        profile = Profile(6)
        preflist = [[0, 1], [1], [1, 3], [4], [2], [1, 5, 3]]
        profile.add_preferences(preflist)

        self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5)
        self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5)
        self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4)
        self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4)
        self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3)
        self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3)


if __name__ == '__main__':
    unittest.main()
[((363, 4, 363, 19), 'unittest.main', 'unittest.main', ({}, {}), '()', False, 'import unittest\n'), ((16, 17, 18, 60), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (), '', False, 'import rules_approval\n'), ((21, 17, 22, 56), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (), '', False, 'import rules_approval\n'), ((36, 15, 36, 32), 'preferences.Profile', 'Profile', ({(36, 23, 36, 31): 'num_cand'}, {}), '(num_cand)', False, 'from preferences import Profile\n'), ((49, 13, 49, 46), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(49, 36, 49, 45): '[0, 4, 5]'}, {}), '([0, 4, 5])', False, 'from preferences import DichotomousPreferences\n'), ((50, 13, 50, 43), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(50, 36, 50, 42): '[1, 2]'}, {}), '([1, 2])', False, 'from preferences import DichotomousPreferences\n'), ((60, 18, 60, 28), 'preferences.Profile', 'Profile', ({(60, 26, 60, 27): '5'}, {}), '(5)', False, 'from preferences import Profile\n'), ((65, 20, 65, 49), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ({}, {}), '()', False, 'import rules_approval\n'), ((79, 18, 79, 28), 'preferences.Profile', 'Profile', ({(79, 26, 79, 27): '3'}, {}), '(3)', False, 'from preferences import Profile\n'), ((86, 20, 86, 49), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ({}, {}), '()', False, 'import rules_approval\n'), ((100, 18, 100, 28), 'preferences.Profile', 'Profile', ({(100, 26, 100, 27): '4'}, {}), '(4)', False, 'from preferences import Profile\n'), ((104, 20, 104, 49), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ({}, {}), '()', False, 'import rules_approval\n'), ((111, 20, 111, 49), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ({}, {}), '()', False, 'import rules_approval\n'), ((123, 18, 123, 28), 'preferences.Profile', 'Profile', ({(123, 26, 123, 27): '4'}, {}), '(4)', False, 'from preferences import Profile\n'), ((142, 18, 142, 28), 'preferences.Profile', 'Profile', ({(142, 26, 142, 27): '6'}, {}), '(6)', False, 'from preferences import Profile\n'), ((158, 18, 158, 28), 'preferences.Profile', 'Profile', ({(158, 26, 158, 27): '6'}, {}), '(6)', False, 'from preferences import Profile\n'), ((227, 18, 227, 28), 'preferences.Profile', 'Profile', ({(227, 26, 227, 27): '6'}, {}), '(6)', False, 'from preferences import Profile\n'), ((238, 18, 238, 28), 'preferences.Profile', 'Profile', ({(238, 26, 238, 27): '5'}, {}), '(5)', False, 'from preferences import Profile\n'), ((281, 18, 281, 28), 'preferences.Profile', 'Profile', ({(281, 26, 281, 27): '6'}, {}), '(6)', False, 'from preferences import Profile\n'), ((350, 18, 350, 28), 'preferences.Profile', 'Profile', ({(350, 26, 350, 27): '6'}, {}), '(6)', False, 'from preferences import Profile\n'), ((53, 29, 53, 67), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(53, 52, 53, 61): '[0, 4, 5]', (53, 63, 53, 66): '(2.4)'}, {}), '([0, 4, 5], 2.4)', False, 'from preferences import DichotomousPreferences\n'), ((80, 32, 80, 59), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(80, 55, 80, 58): '[0]'}, {}), '([0])', False, 'from preferences import DichotomousPreferences\n'), ((81, 32, 81, 59), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(81, 55, 81, 58): '[0]'}, {}), '([0])', False, 'from preferences import DichotomousPreferences\n'), ((82, 32, 82, 62), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(82, 55, 82, 58): '[1]', (82, 60, 82, 61): '(5)'}, {}), 
'([1], 5)', False, 'from preferences import DichotomousPreferences\n'), ((83, 32, 83, 59), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(83, 55, 83, 58): '[0]'}, {}), '([0])', False, 'from preferences import DichotomousPreferences\n'), ((90, 21, 90, 78), 'rules_approval.compute_rule', 'rules_approval.compute_rule', ({(90, 49, 90, 53): 'rule', (90, 55, 90, 62): 'profile', (90, 64, 90, 77): 'committeesize'}, {}), '(rule, profile, committeesize)', False, 'import rules_approval\n'), ((354, 25, 354, 66), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', ({(354, 47, 354, 54): 'profile', (354, 56, 354, 65): '[1, 3, 2]'}, {}), '(profile, [1, 3, 2])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((355, 25, 355, 65), 'score_functions.monroescore_matching', 'monroescore_matching', ({(355, 46, 355, 53): 'profile', (355, 55, 355, 64): '[1, 3, 2]'}, {}), '(profile, [1, 3, 2])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((356, 25, 356, 66), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', ({(356, 47, 356, 54): 'profile', (356, 56, 356, 65): '[2, 1, 5]'}, {}), '(profile, [2, 1, 5])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((357, 25, 357, 65), 'score_functions.monroescore_matching', 'monroescore_matching', ({(357, 46, 357, 53): 'profile', (357, 55, 357, 64): '[2, 1, 5]'}, {}), '(profile, [2, 1, 5])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((358, 25, 358, 66), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', ({(358, 47, 358, 54): 'profile', (358, 56, 358, 65): '[2, 4, 5]'}, {}), '(profile, [2, 4, 5])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((359, 25, 359, 65), 'score_functions.monroescore_matching', 'monroescore_matching', ({(359, 46, 359, 53): 'profile', (359, 55, 359, 64): '[2, 5, 4]'}, {}), '(profile, [2, 5, 4])', False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((38, 12, 38, 45), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(38, 35, 38, 44): '[0, 4, 5]'}, {}), '([0, 4, 5])', False, 'from preferences import DichotomousPreferences\n'), ((41, 33, 41, 67), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(41, 56, 41, 66): '[num_cand]'}, {}), '([num_cand])', False, 'from preferences import DichotomousPreferences\n'), ((43, 33, 43, 61), 'preferences.DichotomousPreferences', 'DichotomousPreferences', ({(43, 56, 43, 60): '[-1]'}, {}), '([-1])', False, 'from preferences import DichotomousPreferences\n'), ((67, 16, 67, 73), 'rules_approval.compute_rule', 'rules_approval.compute_rule', ({(67, 44, 67, 48): 'rule', (67, 50, 67, 57): 'profile', (67, 59, 67, 72): 'committeesize'}, {}), '(rule, profile, committeesize)', False, 'import rules_approval\n'), ((69, 16, 70, 73), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (), '', False, 'import rules_approval\n'), ((130, 16, 131, 70), 'rules_approval.compute_monroe', 'rules_approval.compute_monroe', (), '', False, 'import rules_approval\n'), ((148, 20, 149, 48), 'rules_approval.compute_rule', 'compute_rule', (), '', False, 'from rules_approval import compute_rule\n'), ((107, 33, 108, 75), 'rules_approval.compute_rule', 'rules_approval.compute_rule', ({(107, 61, 107, 65): 'rule', (107, 67, 107, 74): 'profile', (108, 61, 108, 74): 'committeesize'}, {}), '(rule, profile, committeesize)', False, 
'import rules_approval\n'), ((112, 33, 114, 75), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (), '', False, 'import rules_approval\n')]
behnoud-bazrafshan/ThesisPortfolio
Robustness Check/Calculating Risk Factors/calculate_momentum_factor.py
2edda0109fb8aafc984b5dfc2e59cabb949b4a78
import pandas as pd
import numpy as np
import jdatetime

pd.options.mode.chained_assignment = None

# Read Bourseview data for market cap
# Concat all 75 tickers' data
me_list = []
for file_number in range(1, 76):
    print(file_number)
    me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\
        f'{file_number}.xlsx'
    me_df = pd.read_excel(
        me_path,
        skiprows=7,
        usecols=[2, 3, 11],
        names=['date', 'open', 'market_cap'],
        na_values='-'
    )
    # Change order from old to new dates
    me_df = me_df[::-1].reset_index(drop=True)
    me_df['date'] = me_df['date'].str.replace('-', '')
    # Delete non-traded days
    me_df.dropna(subset=['open'], inplace=True)
    me_df.drop(columns='open', inplace=True)
    # Create monthly dataframe
    me_df = me_df.groupby(me_df['date'].str[:6]).last()
    me_df = me_df.drop(columns=['date']).reset_index()
    me_df.insert(1, 'ticker_num', file_number)
    me_list.append(me_df)
me_df = pd.concat(me_list, ignore_index=True)
me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')]
me_df.reset_index(drop=True, inplace=True)

# Read rahavard 365 data for calculating returns
close_list = []
for file_number in range(1, 76):
    rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\
        f'{file_number}.txt'
    df = pd.read_csv(
        rahavard_path,
        usecols=[2, 7],
        names=['date', 'close'],
        header=0,
        dtype={'date': str},
        parse_dates=[0]
    )
    # Solve index reading problem, pandas adds 2 indexes to the df
    df.reset_index(drop=True, inplace=True)
    # Convert to shamsi dates
    df['date'] = df['date'].apply(
        lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d')
    )
    # Create monthly dataframe
    df = df.groupby(df['date'].str[:6]).last()
    df = df.drop(columns=['date']).reset_index()
    df.insert(1, 'ticker_num', file_number)
    df['monthly_return'] = df['close'].pct_change()
    close_list.append(df)
df = pd.concat(close_list, ignore_index=True)
df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')]

# Read index df for indicating open market days
index_path = r'E:\Thesis\New Sampling\TEDPIX\شاخص كل6.xls'
index_df = pd.read_excel(
    index_path,
    usecols=[1],
    names=['date'],
    dtype={'date': str}
)
index_df.dropna(inplace=True)
# The list of all months
months = index_df['date'].str[:6].unique().tolist()
# The list of months that we need for calculating market cap
me_months = [
    '139312', '139401', '139402', '139403', '139404', '139405', '139406',
    '139407', '139408', '139409', '139410', '139411', '139412', '139501',
    '139502', '139503', '139504', '139505', '139506', '139507', '139508',
    '139509', '139510', '139511', '139512', '139601', '139602', '139603',
    '139604', '139605', '139606', '139607', '139608', '139609', '139610',
    '139611', '139612', '139701', '139702', '139703', '139704', '139705',
    '139706', '139707', '139708', '139709', '139710', '139711', '139712',
    '139801', '139802', '139803', '139804', '139805', '139806', '139807',
    '139808', '139809', '139810', '139811', '139812'
]
# The list of months that we need for calculating MOM
mom_months = me_months[1:]
# Merge market cap and price dfs
merged_df = pd.merge(df, me_df, on=['ticker_num', 'date'])
# First, create a NaN column, and then add t-13 prices
merged_df.insert(5, 't-13 price', np.nan)
for month in mom_months:
    # Find t-13 prices
    for ticker in range(1, 76):
        t_13 = months[months.index(month) - 13]
        t_13_condtion = (merged_df['date'] == t_13)
        ticker_condition = (merged_df['ticker_num'] == ticker)
        try:
            t_13_price = merged_df.loc[
                t_13_condtion & ticker_condition
            ]['close'].values[0]
            previous_month = me_months[me_months.index(month) - 1]
            t_1_condtion = (merged_df['date'] == previous_month)
            merged_df.loc[
                (t_1_condtion & ticker_condition), 't-13 price'
            ] = t_13_price
        except:
            # No t-13 price for this ticker (not traded); leave NaN
            pass
# Calculate last 12 months return for month t (t-1, t-12)
merged_df['past_year_return'] = (
    (merged_df['close'] / merged_df['t-13 price']) - 1
)

mom_list = []
for month in mom_months:
    # Check t-13 price condition and t-1 market cap condition
    previous_month = months[months.index(month) - 1]
    me_condition = (merged_df['date'] == previous_month)
    mom_condition = (merged_df['past_year_return'].notna())
    portfo_const_df = merged_df.loc[me_condition & mom_condition]
    # Split each month ME into two groups
    conditions = [
        (
            portfo_const_df['market_cap']
            > portfo_const_df['market_cap'].median()
        ),
        (
            portfo_const_df['market_cap']
            <= portfo_const_df['market_cap'].median()
        )
    ]
    portfolio_size = np.select(conditions, ['B', 'S']).tolist()
    portfo_const_df.insert(6, 'size', portfolio_size)
    # Split each me portfolio into 3 MOM groups
    q = [0, .3, .7, 1]
    labels = ['L', 'M', 'H']
    x_b = portfo_const_df.loc[
        portfo_const_df['size'] == 'B'
    ]['past_year_return']
    b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict()
    x_s = portfo_const_df.loc[
        portfo_const_df['size'] == 'S'
    ]['past_year_return']
    s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict()
    portfo_const_df['mom'] = pd.Series(b_mom)
    portfo_const_df['mom'].update(pd.Series(s_mom))
    # Extract portfolio ticker numbers
    portfo_const_df['portfolio'] = (
        portfo_const_df['size'] + portfo_const_df['mom']
    )
    bh = portfo_const_df.loc[
        portfo_const_df['portfolio'] == 'BH'
    ]['ticker_num'].tolist()
    bl = portfo_const_df.loc[
        portfo_const_df['portfolio'] == 'BL'
    ]['ticker_num'].tolist()
    sh = portfo_const_df.loc[
        portfo_const_df['portfolio'] == 'SH'
    ]['ticker_num'].tolist()
    sl = portfo_const_df.loc[
        portfo_const_df['portfolio'] == 'SL'
    ]['ticker_num'].tolist()
    # Calculating value-weighted return for each portfolio in month t
    # Set conditions
    month_condition = (merged_df['date'] == month)
    bh_condition = merged_df['ticker_num'].isin(bh)
    bl_condition = merged_df['ticker_num'].isin(bl)
    sh_condition = merged_df['ticker_num'].isin(sh)
    sl_condition = merged_df['ticker_num'].isin(sl)
    # Construct portfolios
    bh_portfolio = merged_df.loc[month_condition & bh_condition]
    bl_portfolio = merged_df.loc[month_condition & bl_condition]
    sh_portfolio = merged_df.loc[month_condition & sh_condition]
    sl_portfolio = merged_df.loc[month_condition & sl_condition]
    # Calculate value-weighted returns
    bh_return = np.average(
        bh_portfolio.monthly_return,
        weights=bh_portfolio.market_cap
    )
    bl_return = np.average(
        bl_portfolio.monthly_return,
        weights=bl_portfolio.market_cap
    )
    sh_return = np.average(
        sh_portfolio.monthly_return,
        weights=sh_portfolio.market_cap
    )
    sl_return = np.average(
        sl_portfolio.monthly_return,
        weights=sl_portfolio.market_cap
    )
    # Calculate MOM, and add it to a list
    mom = (
        ((sh_return + bh_return) / 2)
        - ((sl_return + bl_return) / 2)
    )
    mom_list.append(mom)
mom_df = pd.Series(mom_list).to_excel('mom.xlsx')
[((32, 8, 32, 45), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((60, 5, 60, 45), 'pandas.concat', 'pd.concat', (), '', True, 'import pandas as pd\n'), ((64, 11, 69, 1), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((88, 12, 88, 58), 'pandas.merge', 'pd.merge', (), '', True, 'import pandas as pd\n'), ((14, 12, 20, 5), 'pandas.read_excel', 'pd.read_excel', (), '', True, 'import pandas as pd\n'), ((40, 9, 47, 5), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((145, 29, 145, 45), 'pandas.Series', 'pd.Series', ({(145, 39, 145, 44): 'b_mom'}, {}), '(b_mom)', True, 'import pandas as pd\n'), ((176, 16, 179, 5), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((180, 16, 183, 5), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((184, 16, 187, 5), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((188, 16, 191, 5), 'numpy.average', 'np.average', (), '', True, 'import numpy as np\n'), ((146, 34, 146, 50), 'pandas.Series', 'pd.Series', ({(146, 44, 146, 49): 's_mom'}, {}), '(s_mom)', True, 'import pandas as pd\n'), ((198, 9, 198, 28), 'pandas.Series', 'pd.Series', ({(198, 19, 198, 27): 'mom_list'}, {}), '(mom_list)', True, 'import pandas as pd\n'), ((132, 21, 132, 54), 'numpy.select', 'np.select', ({(132, 31, 132, 41): 'conditions', (132, 43, 132, 53): "['B', 'S']"}, {}), "(conditions, ['B', 'S'])", True, 'import numpy as np\n'), ((140, 12, 140, 46), 'pandas.qcut', 'pd.qcut', (), '', True, 'import pandas as pd\n'), ((144, 12, 144, 46), 'pandas.qcut', 'pd.qcut', (), '', True, 'import pandas as pd\n'), ((52, 18, 52, 54), 'jdatetime.date.fromgregorian', 'jdatetime.date.fromgregorian', (), '', False, 'import jdatetime\n')]
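Note on the record above: the script's final loop forms the momentum factor as the value-weighted return spread between high- and low-momentum portfolios. A minimal sketch of that aggregation step (not part of the original file; the four portfolio returns below are made-up placeholder values):

# Hypothetical, pre-computed value-weighted monthly returns for the four
# size/momentum portfolios; names mirror those used in the script above.
bh_return, bl_return, sh_return, sl_return = 0.031, 0.012, 0.028, 0.005

# MOM = average of the high-momentum portfolios minus average of the
# low-momentum portfolios, as in the script's last loop iteration.
mom = ((sh_return + bh_return) / 2) - ((sl_return + bl_return) / 2)
print(round(mom, 4))  # 0.021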
aws-samples/siem-on-amazon-opensearch-service
source/lambda/geoip_downloader/index.py
9bac87d39e9fab04f483bae54ffe94948af096ff
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
__copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. '
                 'All Rights Reserved.')
__version__ = '2.7.1'
__license__ = 'MIT-0'
__author__ = 'Akihiro Nakajima'
__url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service'

import hashlib
import json
import os
import tarfile
import urllib.error
import urllib.parse
import urllib.request

import boto3

# get var from lambda environment
try:
    s3bucket_name = os.environ['s3bucket_name']
    license_key = os.environ['license_key']
except KeyError:
    raise Exception('ERROR: impossible to get lambda environment')
s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/')

s3 = boto3.resource('s3')
bucket = s3.Bucket(s3bucket_name)

url = 'https://download.maxmind.com/app/geoip_download?'
put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country']


def download_file(filename):
    for suffix in ['tar.gz', 'tar.gz.sha256']:
        values = {'edition_id': filename, 'license_key': license_key,
                  'suffix': suffix}
        data = urllib.parse.urlencode(values)
        try:
            urllib.request.urlretrieve(
                url + data, filename='/tmp/' + filename + '.' + suffix)
        except urllib.error.HTTPError as err:
            if err.status == 401:
                return err.status
            print(err)
            raise Exception('ERROR: http error')
        except Exception as err:
            print(err)
            raise Exception('ERROR: ' + err)
    print('INFO: ' + filename + ' was downloaded')
    return 200


def put_to_s3(filename):
    with open('/tmp/' + filename + '.tar.gz.sha256') as f:
        checksum = f.read().split()[0]
        print('INFO: Checksum: ' + checksum)
    with open('/tmp/' + filename + '.tar.gz', 'rb') as f:
        calcurated_checksum = hashlib.sha256(f.read()).hexdigest()
    if checksum not in calcurated_checksum:
        print('ERROR: checksum is different. download is failed')
        return False
    with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as tf:
        directory = tf.getmembers()[0].name
        tf.extractall(path='/tmp/')
    mmdb = directory + '/' + filename + '.mmdb'
    s3obj = s3key_prefix + filename + '.mmdb'
    bucket.upload_file('/tmp/' + mmdb, s3obj)
    print('INFO: uploaded {0} to s3://{1}/{2}'.format(
        mmdb, s3bucket_name, s3obj))


def send(event, context, responseStatus, responseData,
         physicalResourceId=None, noEcho=False):
    # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html
    responseUrl = event['ResponseURL']
    print(responseUrl)

    response_body = {}
    response_body['Status'] = responseStatus
    response_body['Reason'] = ('See the details in CloudWatch Log Stream: '
                               '' + context.log_stream_name)
    response_body['PhysicalResourceId'] = (
        physicalResourceId or context.log_stream_name)
    response_body['StackId'] = event['StackId']
    response_body['RequestId'] = event['RequestId']
    response_body['LogicalResourceId'] = event['LogicalResourceId']
    response_body['NoEcho'] = noEcho
    response_body['Data'] = responseData

    json_response_body = json.dumps(response_body)
    print('Response body:\n' + json_response_body)
    headers = {'content-type': 'application/json', }
    req = urllib.request.Request(
        event['ResponseURL'], json_response_body.encode(),
        headers=headers, method='PUT')
    try:
        res = urllib.request.urlopen(req)
        print('Status code: ' + str(res.status))
    except Exception as e:
        print('send(..) failed executing requests.put(..): ' + str(e))


def lambda_handler(event, context):
    physicalResourceId = 'geoipdb'
    status = 'None'
    if event:
        print(json.dumps(event))
    try:
        for filename in put_files:
            status = download_file(filename)
            if status == 401:
                break
            put_to_s3(filename)
    except Exception as e:
        print(e)
        if event and 'RequestType' in event:
            response = {'failed_reason': e}
            send(event, context, 'FAILED', response, physicalResourceId)
    if event and 'RequestType' in event:
        if status == 401:
            response = {'status': 'invalide_license_key'}
        else:
            response = {'status': 'downloaded'}
        send(event, context, 'SUCCESS', response, physicalResourceId)
        return(json.dumps(response))
[((26, 15, 26, 58), 'os.environ.get', 'os.environ.get', ({(26, 30, 26, 44): '"""s3key_prefix"""', (26, 46, 26, 57): '"""GeoLite2/"""'}, {}), "('s3key_prefix', 'GeoLite2/')", False, 'import os\n'), ((28, 5, 28, 25), 'boto3.resource', 'boto3.resource', ({(28, 20, 28, 24): '"""s3"""'}, {}), "('s3')", False, 'import boto3\n'), ((94, 25, 94, 50), 'json.dumps', 'json.dumps', ({(94, 36, 94, 49): 'response_body'}, {}), '(response_body)', False, 'import json\n'), ((66, 9, 66, 61), 'tarfile.open', 'tarfile.open', ({(66, 22, 66, 52): "('/tmp/' + filename + '.tar.gz')", (66, 54, 66, 60): '"""r:gz"""'}, {}), "('/tmp/' + filename + '.tar.gz', 'r:gz')", False, 'import tarfile\n'), ((132, 15, 132, 35), 'json.dumps', 'json.dumps', ({(132, 26, 132, 34): 'response'}, {}), '(response)', False, 'import json\n'), ((113, 14, 113, 31), 'json.dumps', 'json.dumps', ({(113, 25, 113, 30): 'event'}, {}), '(event)', False, 'import json\n')]
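Note on the record above: lambda_handler doubles as a CloudFormation custom-resource handler, so it can be exercised locally with a synthetic event. A hedged sketch (not part of the original file; the module name, bucket, license key and event values are placeholders, and a real run needs AWS credentials plus a valid MaxMind key):

import os

# Placeholder environment expected by the module at import time.
os.environ.setdefault('s3bucket_name', 'example-log-bucket')
os.environ.setdefault('license_key', 'example-maxmind-license-key')

import index  # assumes the file above is importable as "index"

# Minimal CloudFormation-style custom-resource event; send() reads these keys.
event = {
    'RequestType': 'Create',
    'ResponseURL': 'https://example.invalid/presigned-response-url',
    'StackId': 'arn:aws:cloudformation:ap-northeast-1:123456789012:stack/siem/guid',
    'RequestId': '00000000-0000-0000-0000-000000000000',
    'LogicalResourceId': 'GeoIpDownloader',
}

class FakeContext:
    log_stream_name = 'local-test'

print(index.lambda_handler(event, FakeContext()))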
earthobservatory/isce2
components/mroipac/baseline/Baseline.py
655c46cc4add275879167b750a5e91f6d00f168e
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # United States Government Sponsorship acknowledged. This software is subject to # U.S. export control laws and regulations and has been classified as 'EAR99 NLR' # (No [Export] License Required except when exporting to an embargoed country, # end user, or in support of a prohibited end use). By downloading this software, # the user agrees to comply with all applicable U.S. export laws and regulations. # The user has the responsibility to obtain export licenses, or other export # authority as may be required before exporting this software to any 'EAR99' # embargoed foreign country or citizen of those countries. # # Author: Giangi Sacco #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime import logging from iscesys.Component.Component import Component, Port from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector # A class to hold three-dimensional basis vectors class Basis(object): def __init__(self): self.x1 = [] self.x2 = [] self.x3 = [] # A class to hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1 = x def getPositionVector(self): return self.x1 def setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3 BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default = 'all', type=str, mandatory=False, doc = ('Location at which to compute baselines - "all" implies '+ 'top, middle, bottom of master image, '+ '"top" implies near start of master image, '+ '"bottom" implies at bottom of master image, '+ '"middle" implies near middle of master image. 
'+ 'To be used in case there is a large shift between images.') ) class Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look Angle of the master frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the look vector of the master frame def calculateLookVector(self): try: z = self.masterFrame.terrainHeight except: z = 0.0 cosl = ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) ) # print('Height: ', self.height) # print('Radius: ', self.radius) # print('Range: ', self.startingRange1) # print('COSL: ', cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV = MM.norm(v) return normV # Given an orbit and a time, calculate an orthogonal basis for cross-track and velocity directions # based on the spacecraft position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition() v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn the position vector into a unit vector v = MM.normalizeVector(v) # Turn the velocity vector into a unit vector c = MM.crossProduct(r,v) # Calculate the vector perpendicular to the platform position and velocity, this is the c, or cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a the "velocity" component that is perpendicular to the cross-track direction and position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two position vectors and a basis, calculate the offset between the two positions in this basis def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j] - x1[j]) for j in range(len(x1))] # Calculate the difference between the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the length of the projection of the difference in position and the "velocity" component v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between two frames def baseline(self): #TODO This could be further refactored into a method that calculates the baseline between #TODO frames when given a master time and a slave time and a method that calls this method #TODO multiple times to calculate the rate of baseline change over time. for port in self.inputPorts: port() lookVector = self.calculateLookVector() az_offset = [] vb = [] hb = [] csb = [] asb = [] s = [0.,0.,0.] 
if self.baselineLocation.lower() == 'all': print('Using entire span of image for estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines at top of master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom of master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3): # Calculate the Baseline at the start of the scene, mid-scene, and the end of the scene # First, get the position and velocity at the start of the scene self.logger.info("Sampling time %s" % i) masterBasis = self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved since the last baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] + deltaT*normV masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the position offset # Calculate a new start time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components by the look angle vector asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] # Calculate the gross azimuth and range offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0 asb_avg = (asb[0] + asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize) # Populate class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] 
self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom = csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef): size = len(xRef) if not (len(xRef) == len(yRef)): print("Error. Expecting input vectors of same length.") raise Exception if not (size == 3): print("Error. Expecting input vectors of length 3.") raise Exception Y = [0]*size A = [0]*size M = [[0 for i in range(size) ] for j in range(size)] for j in range(size): for i in range(size): M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M) for i in range(size): for j in range(size): A[i] += MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def setHeight(self,var): self.height = float(var) return def setRadius(self,radius): self.radius = radius return def setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range): self.startingRange2 = range return def getHBaselineTop(self): return self.hBaselineTop def getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self): return self.vBaselineRate def getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6 def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 = frame.getStartingRange() prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = frame.getInstrument().getRangePixelSize() self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid = frame._ellipsoid #UAVSAR frame creates ellipsoid with peg self.radius = ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit = None self.masterFrame = None self.slaveFrame = None self.lookAngle = None self.rangePixelSize = None self.azimuthPixelSize = None self.height = None self.radius = None self.startingRange1 = None self.startingRange2 = None self.hBaselineTop = None self.hBaselineRate = None 
self.hBaselineAcc = None self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst = -99999 super(Baseline, self).__init__(family=self.__class__.family, name=name) self.logger = logging.getLogger('isce.mroipac.baseline') self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables = [] return None def createPorts(self): # Set input ports # It looks like we really need two orbits, a time, range and azimuth pixel sizes # the two starting ranges, a planet, and the two prfs # These provide the orbits # These provide the range and azimuth pixel sizes, starting ranges, # satellite heights and times for the first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr = "Initial Baseline estimates \n" retstr += "Cross-track Baseline: %s\n" retlst = (self.hBaselineTop,) retstr += "Vertical Baseline: %s\n" retlst += (self.vBaselineTop,) retstr += "Perpendicular Baseline: %s\n" retlst += (self.pBaselineTop,) retstr += "Bulk Azimuth Offset: %s\n" retlst += (self.orbSlcAzimuthOffset,) retstr += "Bulk Range Offset: %s\n" retlst += (self.orbSlcRangeOffset,) return retstr % retlst
[((70, 20, 81, 1), 'iscesys.Component.Component.Component.Parameter', 'Component.Parameter', (), '', False, 'from iscesys.Component.Component import Component, Port\n'), ((111, 15, 111, 39), 'math.sqrt', 'math.sqrt', ({(111, 25, 111, 38): '1 - cosl * cosl'}, {}), '(1 - cosl * cosl)', False, 'import math\n'), ((118, 16, 118, 26), 'isceobj.Util.mathModule.MathModule.norm', 'MM.norm', ({(118, 24, 118, 25): 'v'}, {}), '(v)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((129, 12, 129, 34), 'isceobj.Util.mathModule.MathModule.normalizeVector', 'MM.normalizeVector', ({(129, 31, 129, 33): 'x1'}, {}), '(x1)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((130, 12, 130, 33), 'isceobj.Util.mathModule.MathModule.normalizeVector', 'MM.normalizeVector', ({(130, 31, 130, 32): 'v'}, {}), '(v)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((131, 12, 131, 32), 'isceobj.Util.mathModule.MathModule.crossProduct', 'MM.crossProduct', ({(131, 28, 131, 29): 'r', (131, 30, 131, 31): 'v'}, {}), '(r, v)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((132, 12, 132, 33), 'isceobj.Util.mathModule.MathModule.normalizeVector', 'MM.normalizeVector', ({(132, 31, 132, 32): 'c'}, {}), '(c)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((133, 12, 133, 32), 'isceobj.Util.mathModule.MathModule.crossProduct', 'MM.crossProduct', ({(133, 28, 133, 29): 'c', (133, 30, 133, 31): 'r'}, {}), '(c, r)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((258, 16, 258, 34), 'isceobj.Util.mathModule.MathModule.invertMatrix', 'MM.invertMatrix', ({(258, 32, 258, 33): 'M'}, {}), '(M)', True, 'from isceobj.Util.mathModule import MathModule as MM\n'), ((383, 22, 383, 64), 'logging.getLogger', 'logging.getLogger', ({(383, 40, 383, 63): '"""isce.mroipac.baseline"""'}, {}), "('isce.mroipac.baseline')", False, 'import logging\n'), ((402, 26, 402, 77), 'iscesys.Component.Component.Port', 'Port', (), '', False, 'from iscesys.Component.Component import Component, Port\n'), ((403, 25, 403, 74), 'iscesys.Component.Component.Port', 'Port', (), '', False, 'from iscesys.Component.Component import Component, Port\n'), ((95, 28, 95, 67), 'math.atan2', 'math.atan2', ({(95, 39, 95, 52): 'lookVector[1]', (95, 53, 95, 66): 'lookVector[0]'}, {}), '(lookVector[1], lookVector[0])', False, 'import math\n'), ((185, 55, 185, 86), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((185, 155, 185, 186), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((209, 47, 209, 91), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((256, 26, 256, 45), 'math.pow', 'math.pow', ({(256, 35, 256, 42): 'xRef[j]', (256, 43, 256, 44): 'i'}, {}), '(xRef[j], i)', False, 'import math\n'), ((173, 61, 173, 92), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((173, 163, 173, 194), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((177, 100, 177, 131), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((177, 170, 177, 201), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((180, 63, 180, 94), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n'), ((180, 132, 180, 163), 'datetime.timedelta', 'datetime.timedelta', (), '', False, 'import datetime\n')]
Bot-Box/FiveCardStud
src/modules/deuces/deck.py
55e11d7a23becece33658075f922cf007909d058
from random import shuffle as rshuffle
from .card import Card


class Deck:
    """
    Class representing a deck. The first time we create, we seed the static
    deck with the list of unique card integers. Each object instantiated
    simply makes a copy of this object and shuffles it.
    """
    _FULL_DECK = []

    def __init__(self):
        self.shuffle()

    def shuffle(self):
        # and then shuffle
        self.cards = Deck.GetFullDeck()
        rshuffle(self.cards)

    def draw(self, n=1):
        if n == 1:
            return self.cards.pop(0)

        cards = []
        for i in range(n):
            cards.append(self.draw())
        return cards

    def __str__(self):
        return Card.print_pretty_cards(self.cards)

    @staticmethod
    def GetFullDeck():
        if Deck._FULL_DECK:
            return list(Deck._FULL_DECK)

        # create the standard 52 card deck
        for rank in Card.STR_RANKS:
            for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items():
                Deck._FULL_DECK.append(Card.new(rank + suit))

        return list(Deck._FULL_DECK)
[((19, 8, 19, 28), 'random.shuffle', 'rshuffle', ({(19, 17, 19, 27): 'self.cards'}, {}), '(self.cards)', True, 'from random import shuffle as rshuffle\n')]
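A short usage sketch for the Deck class in the record above (not part of the original module; the import path is an assumption based on the repo layout):

# Hypothetical usage of the Deck/Card classes defined in this package.
from modules.deuces.deck import Deck
from modules.deuces.card import Card

deck = Deck()           # seeds the 52-card deck and shuffles a copy
hand = deck.draw(5)     # five card integers
board = deck.draw(3)
print(Card.print_pretty_cards(hand))
print(len(deck.cards))  # 44 cards remain after the two draws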
puiterwijk/python-openidc-client
openidc_client/__init__.py
cd8d91c0503124305727f38a0f9fe93bb472209c
# -*- coding: utf-8 -*- # # Copyright (C) 2016, 2017 Red Hat, Inc. # Red Hat Author: Patrick Uiterwijk <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Client for applications relying on OpenID Connect for authentication.""" from __future__ import print_function from copy import copy import json import logging from threading import Lock import time try: from StringIO import StringIO except ImportError: from io import StringIO import socket import os try: from urllib import urlencode except ImportError: from urllib.parse import urlencode from uuid import uuid4 as uuidgen import webbrowser from wsgiref import simple_server import requests import sys from openidc_client import release # The ports that we will try to use for our webserver WEB_PORTS = [12345, 23456] class OpenIDCClient(object): # Internal implementation of tokens: # Every app id has its own token cache # The token cache is a json serialized dict # This dict contains uuid: token pairs # Every "token" object is a json dict with the following keys: # idp: The URL of the idp that issued the token # sub: The subject that owns the token # access_token: Token value # token_type: Token type. Currently supported: "Bearer" # expires_at: Token expiration UTC time. NOTE: Even if the expires_at # indicates the token should still be valid, it may have been revoked by # the user! Also, even if it has expired, we might still be able to # refresh the token. # refresh_token: The token we can use to refresh the access token # scopes: A list of scopes that we had requested with the token def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None, use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): """Client for interacting with web services relying on OpenID Connect. :param app_identifier: Identifier for storage of retrieved tokens :param id_provider: URL of the identity provider to get tokens from :param id_provider_mapping: Mapping with URLs to use for specific endpoints on the IdP. :kwarg use_post: Whether to use POST submission of client secrets rather than Authorization header :kwarg client_id: The Client Identifier used to request credentials :kwarg client_secret: The client "secret" that goes with the client_id. May be None if your IdP does not require you to use a secret. :kwarg useragent: Useragent string to use. If not provided, defaults to "python-openidc-client/VERSION" :kwarg cachedir: The directory in which to store the token caches. Will be put through expanduer. 
Default is ~/.openidc. If this does not exist and we are unable to create it, the OSError will be thrown. :kwargs printfd: The File object to print token instructions to. """ self.logger = logging.getLogger(__name__) self.debug = self.logger.debug self.app_id = app_identifier self.use_post = use_post self.idp = id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret = client_secret self.useragent = useragent or 'python-openid-client/%s' % \ release.VERSION self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported = False self.token_to_try = None self._retrieved_code = None # TODO: Make cache_lock a filesystem lock so we also lock across # multiple invocations self._cache_lock = Lock() with self._cache_lock: self.__refresh_cache() self._valid_cache = [] self._printfd = printfd def get_token(self, scopes, new_token=True): """Function to retrieve tokens with specific scopes. This function will block until a token is retrieved if requested. It is always safe to call this though, since if we already have a token with the current app_identifier that has the required scopes, we will return it. This function will return a bearer token or None. Note that the bearer token might have been revoked by the user or expired. In that case, you will want to call report_token_issue() to try to renew the token or delete the token. :kwarg scopes: A list of scopes required for the current client. :kwarg new_token: If True, we will actively request the user to get a new token with the current scopeset if we do not already have on. :rtype: string or None :returns: String bearer token if possible or None """ if not isinstance(scopes, list): raise ValueError('Scopes must be a list') token = self._get_token_with_scopes(scopes) if token: # If we had a valid token, use that self.last_returned_uuid = token[0] self.problem_reported = False return token[1]['access_token'] elif not new_token: return None # We did not have a valid token, now comes the hard part... uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self): """Report an error with the last token that was returned. This will attempt to renew the token that was last returned. If that worked, we will return the new access token. If it did not work, we will return None and remove this token from the cache. If you get an indication from your application that the token you sent was invalid, you should call it. You should explicitly NOT call this function if the token was valid but your request failed due to a server error or because the account or token was lacking specific permissions. """ if not self.last_returned_uuid: raise Exception('Cannot report issue before requesting token') if self.problem_reported: # We were reported an issue before. Let's just remove this token. self._delete_token(self.last_returned_uuid) return None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported = True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): """Make an python-requests POST request. Allarguments and keyword arguments are like the arguments to requests, except for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes: Scopes required for this call. 
If a token is not present with this token, a new one will be requested unless nonblocking is True. :kwarg new_token: If True, we will actively request the user to get a new token with the current scopeset if we do not already have on. :kwarg auto_refresh: If False, will not try to automatically report token issues on 401. This helps with broken apps that may send a 401 return code in incorrect cases. :kwargs http_method: The HTTP method to use, defaults to POST.. """ ckwargs = copy(kwargs) scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method', 'POST') is_retry = False if self.token_to_try: is_retry = True token = self.token_to_try self.token_to_try = None else: token = self.get_token(scopes, new_token=new_token) if not token: return None if self.use_post: if 'json' in ckwargs: raise ValueError('Cannot provide json in a post call') if method not in ['POST']: raise ValueError('Cannot use POST tokens in %s method' % method) if 'data' not in ckwargs: ckwargs['data'] = {} ckwargs['data']['access_token'] = token else: if 'headers' not in ckwargs: ckwargs['headers'] = {} ckwargs['headers']['Authorization'] = 'Bearer %s' % token resp = requests.request(method, *args, **ckwargs) if resp.status_code == 401 and not is_retry: if not auto_refresh: return resp self.token_to_try = self.report_token_issue() if not self.token_to_try: return resp return self.send_request(*args, **kwargs) elif resp.status_code == 401: # We got a 401 and this is a retry. Report error self.report_token_issue() return resp else: return resp @property def _cachefile(self): """Property to get the cache file name for the current client. This assures that whenever this file is touched, the cache lock is held """ assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self): """Refreshes the self._cache from the cache on disk. Requires cache_lock to be held by caller.""" assert self._cache_lock.locked() self.debug('Refreshing cache') if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile, 'r') as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): """Refreshes the self._cache from the cache on disk. cache_lock may not be held by anyone.""" with self._cache_lock: self.__refresh_cache() def __write_cache(self): """Wirtes self._cache to cache on disk. Requires cache_lock to be held by caller.""" assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile, 'w') as f: f.write(json.dumps(self._cache)) def _add_token(self, token): """Adds a token to the cache and writes cache to disk. cache_lock may not be held by anyone. :param token: Dict of the token to be added to the cache """ uuid = uuidgen().hex self.debug('Adding token %s to cache', uuid) with self._cache_lock: self.__refresh_cache() self._cache[uuid] = token self.__write_cache() return uuid def _update_token(self, uuid, toupdate): """Updates a token in the cache. cache_lock may not be held by anyone. 
:param token: UUID of the token to be updated :param toupdate: Dict indicating which fields need to be updated """ self.debug('Updating token %s in cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if uuid not in self._cache: return None self._cache[uuid].update(toupdate) self.__write_cache() return uuid def _delete_token(self, uuid): """Removes a token from the cache and writes cache to disk. cache_lock may not be held by anyone. :param uuid: UUID of the token to be removed from cache """ self.debug('Removing token %s from cache', uuid) with self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token was already gone') def _get_token_with_scopes(self, scopes): """Searches the cache for any tokens that have the requested scopes. It will prefer to return tokens whose expires_at is still before the current time, but if no such tokens exist it will return the possibly expired token: it might be refreshable. :param scopes: List of scopes that need to be in the returned token :rtype: (string, dict) or None :returns: Token UUID and contents or None if no applicable tokens were found """ possible_token = None self.debug('Trying to get token with scopes %s', scopes) for uuid in self._cache: self.debug('Checking %s', uuid) token = self._cache[uuid] if token['idp'] != self.idp: self.debug('Incorrect idp') continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): # This is a token that's supposed to still be valid, prefer it # over any others we have self.debug('Not yet expired, returning') return uuid, token # This is a token that may or may not still be valid self.debug('Possible') possible_token = (uuid, token) if possible_token: self.debug('Returning possible token') return possible_token def _idp_url(self, method): """Returns the IdP URL for the requested method. :param method: The method name in the IdP mapping dict. :rtype: string :returns: The IdP URL """ if method in self.idp_mapping: return self.idp + self.idp_mapping[method] else: return ValueError('Idp Mapping did not include path for %s' % method) def _refresh_token(self, uuid): """Tries to refresh a token and put the refreshed token in self._cache The caller is responsible for either removing the token if it could not be refreshed or saving the cache if renewal was succesful. :param uuid: The UUID of the cached token to attempt to refresh. 
:rtype: bool :returns: True if the token was succesfully refreshed, False otherwise """ oldtoken = self._cache[uuid] self.debug('Refreshing token %s', uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if self.client_secret: data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp = resp.json() if 'error' in resp: self.debug('Unable to refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed until %s', self._cache[uuid]['expires_at']) return True def _get_server(self, app): """This function returns a SimpleServer with an available WEB_PORT.""" for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app) return server except socket.error: # This port did not work. Switch to next one continue def _get_new_token(self, scopes): """This function kicks off some magic. We will start a new webserver on one of the WEB_PORTS, and then either show the user a URL, or if possible, kick off their browser. This URL will be the Authorization endpoint of the IdP with a request for our client_id to get a new token with the specified scopes. The webserver will then need to catch the return with either an Authorization Code (that we will exchange for an access token) or the cancellation message. This function will store the new token in the local cache, add it to the valid cache, and then return the UUID. If the user cancelled (or we got another error), we will return None. """ def _token_app(environ, start_response): query = environ['QUERY_STRING'] split = query.split('&') kv = dict([v.split('=', 1) for v in split]) if 'error' in kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code = kv['code'] # Just return a message start_response('200 OK', [('Content-Type', 'text/plain')]) return [u'You can close this window and return to the CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app) if not server: raise Exception('We were unable to instantiate a webserver') return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {} rquery['scope'] = ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query' query = urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request() server.server_close() assert self._retrieved_code is not None if self._retrieved_code is False: # The user cancelled the request self._retrieved_code = None self.debug('User cancelled') return None self.debug('We got an authorization code!') data = {'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if self.client_secret: data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() self._retrieved_code = None resp = resp.json() if 'error' in resp: self.debug('Error exchanging authorization code: %s', resp['error']) return None token = {'access_token': 
resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'], 'scopes': scopes} # AND WE ARE DONE! \o/ return self._add_token(token)
[((93, 22, 93, 49), 'logging.getLogger', 'logging.getLogger', ({(93, 40, 93, 48): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((104, 24, 104, 68), 'os.path.expanduser', 'os.path.expanduser', ({(104, 43, 104, 67): "cachedir or '~/.openidc'"}, {}), "(cachedir or '~/.openidc')", False, 'import os\n'), ((111, 27, 111, 33), 'threading.Lock', 'Lock', ({}, {}), '()', False, 'from threading import Lock\n'), ((200, 18, 200, 30), 'copy.copy', 'copy', ({(200, 23, 200, 29): 'kwargs'}, {}), '(kwargs)', False, 'from copy import copy\n'), ((232, 15, 232, 57), 'requests.request', 'requests.request', ({(232, 32, 232, 38): 'method', (232, 40, 232, 45): '*args'}, {}), '(method, *args, **ckwargs)', False, 'import requests\n'), ((255, 15, 255, 72), 'os.path.join', 'os.path.join', ({(255, 28, 255, 41): 'self.cachedir', (255, 43, 255, 71): "('oidc_%s.json' % self.app_id)"}, {}), "(self.cachedir, 'oidc_%s.json' % self.app_id)", False, 'import os\n'), ((478, 16, 478, 33), 'urllib.parse.urlencode', 'urlencode', ({(478, 26, 478, 32): 'rquery'}, {}), '(rquery)', False, 'from urllib.parse import urlencode\n'), ((482, 8, 482, 34), 'webbrowser.open', 'webbrowser.open', ({(482, 24, 482, 33): 'authz_url'}, {}), '(authz_url)', False, 'import webbrowser\n'), ((263, 15, 263, 43), 'os.path.isdir', 'os.path.isdir', ({(263, 29, 263, 42): 'self.cachedir'}, {}), '(self.cachedir)', False, 'import os\n'), ((265, 12, 265, 38), 'os.makedirs', 'os.makedirs', ({(265, 24, 265, 37): 'self.cachedir'}, {}), '(self.cachedir)', False, 'import os\n'), ((266, 15, 266, 46), 'os.path.exists', 'os.path.exists', ({(266, 30, 266, 45): 'self._cachefile'}, {}), '(self._cachefile)', False, 'import os\n'), ((297, 15, 297, 24), 'uuid.uuid4', 'uuidgen', ({}, {}), '()', True, 'from uuid import uuid4 as uuidgen\n'), ((288, 20, 288, 43), 'json.dumps', 'json.dumps', ({(288, 31, 288, 42): 'self._cache'}, {}), '(self._cache)', False, 'import json\n'), ((365, 37, 365, 48), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((430, 25, 430, 72), 'wsgiref.simple_server.make_server', 'simple_server.make_server', ({(430, 51, 430, 60): '"""0.0.0.0"""', (430, 62, 430, 66): 'port', (430, 68, 430, 71): 'app'}, {}), "('0.0.0.0', port, app)", False, 'from wsgiref import simple_server\n'), ((514, 31, 514, 42), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((269, 24, 269, 38), 'json.dumps', 'json.dumps', ({(269, 35, 269, 37): '{}'}, {}), '({})', False, 'import json\n'), ((422, 27, 422, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')]
constantinius/eoxserver_combined
eoxserver/services/ows/wps/v10/encoders/parameters.py
68f261133fed65a4e8a6ddba82b0d2845171e4bf
#-------------------------------------------------------------------------------
#
#  WPS 1.0 parameters' XML encoders
#
# Project: EOxServer <http://eoxserver.org>
# Authors: Fabian Schindler <[email protected]>
#          Martin Paces <[email protected]>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2013 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------

from eoxserver.services.ows.wps.parameters import (
    LiteralData, ComplexData, BoundingBoxData,
    AllowedAny, AllowedEnum, AllowedRange,
    AllowedRangeCollection, AllowedByReference,
)
from eoxserver.services.ows.wps.v10.util import (
    OWS, WPS, NIL, ns_ows,
)

#-------------------------------------------------------------------------------

def encode_input_descr(prm):
    """ Encode process description input."""
    elem = NIL("Input", *_encode_param_common(prm))
    elem.attrib["minOccurs"] = ("1", "0")[bool(prm.is_optional)]
    elem.attrib["maxOccurs"] = "1"
    if isinstance(prm, LiteralData):
        elem.append(_encode_literal(prm, True))
    elif isinstance(prm, ComplexData):
        elem.append(_encode_complex(prm, True))
    elif isinstance(prm, BoundingBoxData):
        elem.append(_encode_bbox(prm, True))
    return elem

def encode_output_descr(prm):
    """ Encode process description output."""
    elem = NIL("Output", *_encode_param_common(prm))
    if isinstance(prm, LiteralData):
        elem.append(_encode_literal(prm, False))
    elif isinstance(prm, ComplexData):
        elem.append(_encode_complex(prm, False))
    elif isinstance(prm, BoundingBoxData):
        elem.append(_encode_bbox(prm, False))
    return elem

def encode_input_exec(prm):
    """ Encode common part of the execute response data input."""
    return WPS("Input", *_encode_param_common(prm, False))

def encode_output_exec(prm):
    """ Encode common part of the execute response data output."""
    return WPS("Output", *_encode_param_common(prm))

def encode_output_def(outdef):
    """ Encode the execute response output definition."""
    attrib = {}
    if outdef.uom is not None:
        attrib['uom'] = outdef.uom
    if outdef.crs is not None:
        attrib['crs'] = outdef.crs
    if outdef.mime_type is not None:
        attrib['mimeType'] = outdef.mime_type
    if outdef.encoding is not None:
        attrib['encoding'] = outdef.encoding
    if outdef.schema is not None:
        attrib['schema'] = outdef.schema
    if outdef.as_reference is not None:
        attrib['asReference'] = 'true' if outdef.as_reference else 'false'
    return WPS("Output", *_encode_param_common(outdef, False), **attrib)

def _encode_param_common(prm, title_required=True):
    """ Encode common sub-elements of all XML parameters."""
    elist = [OWS("Identifier", prm.identifier)]
    if prm.title or title_required:
        elist.append(OWS("Title", prm.title or prm.identifier))
    if prm.abstract:
        elist.append(OWS("Abstract", prm.abstract))
    return elist

#-------------------------------------------------------------------------------

def _encode_literal(prm, is_input):
    dtype = prm.dtype
    elem = NIL("LiteralData" if is_input else "LiteralOutput")

    elem.append(OWS("DataType", dtype.name, **{
        ns_ows("reference"): "http://www.w3.org/TR/xmlschema-2/#%s"%dtype.name,
    }))

    if prm.uoms:
        elem.append(NIL("UOMs",
            NIL("Default", OWS("UOM", prm.uoms[0])),
            NIL("Supported", *[OWS("UOM", u) for u in prm.uoms])
        ))

    if is_input:
        elem.append(_encode_allowed_value(prm.allowed_values))

        if prm.default is not None:
            elem.append(NIL("DefaultValue", str(prm.default)))

    return elem

def _encode_allowed_value(avobj):
    enum, ranges, elist = None, [], []

    if isinstance(avobj, AllowedAny):
        return OWS("AnyValue")
    elif isinstance(avobj, AllowedByReference):
        return WPS("ValuesReference", **{
            ns_ows("reference"): avobj.url,
            "valuesForm": avobj.url,
        })
    elif isinstance(avobj, AllowedEnum):
        enum = avobj
    elif isinstance(avobj, AllowedRange):
        ranges = [avobj]
    elif isinstance(avobj, AllowedRangeCollection):
        enum, ranges = avobj.enum, avobj.ranges
    else:
        raise TypeError("Invalid allowed value object! OBJ=%r"%avobj)

    dtype = avobj.dtype
    ddtype = dtype.get_diff_dtype()

    if enum is not None:
        elist.extend(OWS("Value", dtype.encode(v)) for v in enum.values)

    for range_ in ranges:
        attr, elms = {}, []
        if range_.closure != 'closed':
            attr = {ns_ows("rangeClosure"): range_.closure}
        if range_.minval is not None:
            elms.append(OWS("MinimumValue", dtype.encode(range_.minval)))
        if range_.maxval is not None:
            elms.append(OWS("MaximumValue", dtype.encode(range_.maxval)))
        if range_.spacing is not None:
            elms.append(OWS("Spacing", ddtype.encode(range_.spacing)))
        elist.append(OWS("Range", *elms, **attr))

    return OWS("AllowedValues", *elist)

#-------------------------------------------------------------------------------

def _encode_complex(prm, is_input):
    return NIL("ComplexData" if is_input else "ComplexOutput",
        NIL("Default", _encode_format(prm.default_format)),
        NIL("Supported", *[_encode_format(f) for f in prm.formats.itervalues()])
    )

def _encode_format(frmt):
    elem = NIL("Format", NIL("MimeType", frmt.mime_type))
    if frmt.encoding is not None:
        elem.append(NIL("Encoding", frmt.encoding))
    if frmt.schema is not None:
        elem.append(NIL("Schema", frmt.schema))
    return elem

#-------------------------------------------------------------------------------

def _encode_bbox(prm, is_input):
    return NIL("BoundingBoxData" if is_input else "BoundingBoxOutput",
        NIL("Default", NIL("CRS", prm.encode_crs(prm.default_crs))),
        NIL("Supported", *[NIL("CRS", prm.encode_crs(crs)) for crs in prm.crss])
    )
[((105, 11, 105, 62), 'eoxserver.services.ows.wps.v10.util.NIL', 'NIL', ({(105, 15, 105, 61): "'LiteralData' if is_input else 'LiteralOutput'"}, {}), "('LiteralData' if is_input else 'LiteralOutput')", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((161, 11, 161, 39), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(161, 15, 161, 30): '"""AllowedValues"""', (161, 32, 161, 38): '*elist'}, {}), "('AllowedValues', *elist)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((94, 13, 94, 46), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(94, 17, 94, 29): '"""Identifier"""', (94, 31, 94, 45): 'prm.identifier'}, {}), "('Identifier', prm.identifier)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((129, 15, 129, 30), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(129, 19, 129, 29): '"""AnyValue"""'}, {}), "('AnyValue')", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((172, 25, 172, 56), 'eoxserver.services.ows.wps.v10.util.NIL', 'NIL', ({(172, 29, 172, 39): '"""MimeType"""', (172, 41, 172, 55): 'frmt.mime_type'}, {}), "('MimeType', frmt.mime_type)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((96, 21, 96, 62), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(96, 25, 96, 32): '"""Title"""', (96, 34, 96, 61): '(prm.title or prm.identifier)'}, {}), "('Title', prm.title or prm.identifier)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((98, 21, 98, 50), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(98, 25, 98, 35): '"""Abstract"""', (98, 37, 98, 49): 'prm.abstract'}, {}), "('Abstract', prm.abstract)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((159, 21, 159, 48), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(159, 25, 159, 32): '"""Range"""', (159, 34, 159, 39): '*elms'}, {}), "('Range', *elms, **attr)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((174, 20, 174, 50), 'eoxserver.services.ows.wps.v10.util.NIL', 'NIL', ({(174, 24, 174, 34): '"""Encoding"""', (174, 36, 174, 49): 'frmt.encoding'}, {}), "('Encoding', frmt.encoding)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((176, 20, 176, 46), 'eoxserver.services.ows.wps.v10.util.NIL', 'NIL', ({(176, 24, 176, 32): '"""Schema"""', (176, 34, 176, 45): 'frmt.schema'}, {}), "('Schema', frmt.schema)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((152, 20, 152, 42), 'eoxserver.services.ows.wps.v10.util.ns_ows', 'ns_ows', ({(152, 27, 152, 41): '"""rangeClosure"""'}, {}), "('rangeClosure')", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((108, 8, 108, 27), 'eoxserver.services.ows.wps.v10.util.ns_ows', 'ns_ows', ({(108, 15, 108, 26): '"""reference"""'}, {}), "('reference')", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((113, 27, 113, 50), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(113, 31, 113, 36): '"""UOM"""', (113, 38, 113, 49): 'prm.uoms[0]'}, {}), "('UOM', prm.uoms[0])", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n'), ((132, 12, 132, 31), 'eoxserver.services.ows.wps.v10.util.ns_ows', 'ns_ows', ({(132, 19, 132, 30): '"""reference"""'}, {}), "('reference')", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, 
ns_ows\n'), ((114, 31, 114, 44), 'eoxserver.services.ows.wps.v10.util.OWS', 'OWS', ({(114, 35, 114, 40): '"""UOM"""', (114, 42, 114, 43): 'u'}, {}), "('UOM', u)", False, 'from eoxserver.services.ows.wps.v10.util import OWS, WPS, NIL, ns_ows\n')]
ClinGen/gene-and-variant-curation-tools
gci-vci-serverless/src/helpers/vp_saves_helpers.py
30f21d8f03d8b5c180c1ce3cb8401b5abc660080
import datetime
import uuid

import simplejson as json

from src.db.s3_client import Client as S3Client
from decimal import Decimal

def get_from_archive(archive_key):
  ''' Download a VP Save from S3.

  :param str archive_key: The vp_save data's location (S3 bucket and file path). This value is required.
  '''

  if archive_key is None or '/' not in archive_key:
    raise ValueError()

  bucket, key = archive_key.split('/', 1)

  s3_client = S3Client()

  try:
    archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal)
  except Exception as e:
    print('ERROR: Error downloading ' + key + ' from ' + bucket + ' bucket. ERROR\n%s' %e)
    raise

  return archive_object

def build(vp_save={}):
  ''' Builds and returns a valid vp_save object.

  Builds a new vp_save object by creating default values for
  required fields and combines any of the given attributes.
  '''

  vp_save['PK'] = str(uuid.uuid4())

  # Set timestamps (for new data)
  now = datetime.datetime.now().isoformat()
  vp_save['date_created'] = now
  vp_save['last_modified'] = now

  vp_save['item_type'] = 'vp_save'

  return vp_save

def archive(bucket, vp_save_pk, save_data):
  ''' Archives a vp save data to S3.

  Uploads the save data object as a JSON file to S3. The location of the archive
  depends on the bucket and the primary key of the save data. If the upload fails,
  an exception is raised. If successful, returns the archive location.

  :param str bucket: The name of the S3 bucket for the archive. This value is required.
  :param str vp_save_pk: The vp_save PK to use as the name of the JSON file. This value is required.
  :param obj save_data: The save data object to archive. This value is required.
  '''

  if bucket is None or len(bucket) <= 0:
    raise ValueError()

  if vp_save_pk is None or len(vp_save_pk) <= 0:
    raise ValueError()

  if not save_data:
    raise ValueError()

  archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json'

  # Upload curation data to S3 archive bucket.
  s3_client = S3Client()

  try:
    s3_client.put_object(
      bytes(json.dumps(save_data).encode('UTF-8')),
      bucket,
      archive_file
    )
  except Exception as e:
    print('ERROR: Error uploading ' + archive_file + ' to ' + bucket + ' bucket. ERROR\n%s' %e)
    raise

  archive_key_comps = [bucket, archive_file]

  return '/'.join(archive_key_comps)

def __archive_key(save_data):
  return save_data['PK']
[((20, 14, 20, 24), 'src.db.s3_client.Client', 'S3Client', ({}, {}), '()', True, 'from src.db.s3_client import Client as S3Client\n'), ((73, 14, 73, 24), 'src.db.s3_client.Client', 'S3Client', ({}, {}), '()', True, 'from src.db.s3_client import Client as S3Client\n'), ((38, 22, 38, 34), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n'), ((41, 8, 41, 31), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((77, 12, 77, 33), 'simplejson.dumps', 'json.dumps', ({(77, 23, 77, 32): 'save_data'}, {}), '(save_data)', True, 'import simplejson as json\n')]
ruhugu/brokenaxes
docs/source/auto_examples/plot_usage.py
1cfb301c854b3336aeb4dd9a2c329310534dfb21
""" Basic usage =========== This example presents the basic usage of brokenaxes """ import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3) bax.set_xlabel('time') bax.set_ylabel('value')
[((14, 6, 14, 31), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((15, 6, 15, 83), 'brokenaxes.brokenaxes', 'brokenaxes', (), '', False, 'from brokenaxes import brokenaxes\n'), ((16, 4, 16, 26), 'numpy.linspace', 'np.linspace', ({(16, 16, 16, 17): '0', (16, 19, 16, 20): '1', (16, 22, 16, 25): '100'}, {}), '(0, 1, 100)', True, 'import numpy as np\n'), ((17, 12, 17, 26), 'numpy.sin', 'np.sin', ({(17, 19, 17, 25): '(10 * x)'}, {}), '(10 * x)', True, 'import numpy as np\n'), ((18, 12, 18, 26), 'numpy.cos', 'np.cos', ({(18, 19, 18, 25): '(10 * x)'}, {}), '(10 * x)', True, 'import numpy as np\n')]
toyo-bunko/paper_app
src/entity/002_createRdf.py
f988e05cf83711d98c5ed735c0fd74fcf11e0f05
import shutil
import os
import json
import glob
import yaml
import sys
import urllib
import ssl
import csv
import time
import requests
import json
import csv

from rdflib import URIRef, BNode, Literal, Graph
from rdflib.namespace import RDF, RDFS, FOAF, XSD
from rdflib import Namespace

all = Graph()

with open("data/dict.json") as f:
    ln_map = json.load(f)

st_path = "../data/index.json"
with open(st_path) as f:
    result = json.load(f)

uris = []

for obj in result:
    fields = ["spatial", "agential"]
    for field in fields:
        values = obj[field]
        for value in values:
            uri = "chname:"+value
            if field == "spatial":
                uri = "place:"+value
            if uri not in uris:
                uris.append(uri)

for uri in uris:
    print(uri)

    tmp = uri.split(":")
    prefix = tmp[0]
    suffix = tmp[1]

    ln = suffix

    ln_org = ""

    if ln in ln_map:
        ln_org = ln
        ln = ln_map[ln]

        if len(ln) > 20:
            continue

        # ln = obj["uri"].split(":")[1]

        '''
        wiki_path = "data/wikidata/"+ln+".json"

        wiki = {}

        if os.path.exists(wiki_path):
            with open(wiki_path) as f:
                wiki = json.load(f)

        # sameAs
        stmt = (subject, URIRef("http://www.w3.org/2002/07/owl#sameAs"), URIRef(wiki_url))
        all.add(stmt)

        obj = wiki["entities"][wiki_url.split("/")[-1]]

        # description
        if "descriptions" in obj and "ja" in obj["descriptions"]:
            stmt = (subject, URIRef("http://schema.org/description"), Literal(obj["descriptions"]["ja"]["value"], lang="ja"))
            all.add(stmt)

        # label
        if "labels" in obj and "ja" in obj["labels"]:
            stmt = (subject, RDFS.label, Literal(obj["labels"]["ja"]["value"]))
            all.add(stmt)

        ln = wiki_url.split("/")[-1]
        '''

        db_path = "data/dbpedia_ja/"+ln+".json"
        wiki_path = "data/wikidata/"+ln+".json"

        db = {}
        wiki = {}

        if os.path.exists(db_path):
            with open(db_path) as f:
                db = json.load(f)

        if os.path.exists(wiki_path):
            with open(wiki_path) as f:
                wiki = json.load(f)

        db_uri = "http://ja.dbpedia.org/resource/"+ln

        if db_uri not in db:
            print("not" , db_uri)
            continue

        # ######

        subject = URIRef("https://shibusawa-dlab.github.io/lab1/api/"+prefix+"/"+ln)

        if prefix == "chname":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Agent"))
            all.add(stmt)
        elif prefix == "time":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Time"))
            all.add(stmt)
        elif prefix == "place":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Place"))
            all.add(stmt)
        elif prefix == "event":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Event"))
            all.add(stmt)
        elif prefix == "org":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Organization"))
            all.add(stmt)
        elif prefix == "keyword":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Keyword"))
            all.add(stmt)
        elif prefix == "type":
            stmt = (subject, RDF.type, URIRef("https://jpsearch.go.jp/term/type/Type"))
            all.add(stmt)

        # ######

        obj = db[db_uri]

        stmt = (subject, URIRef("http://www.w3.org/2002/07/owl#sameAs"), URIRef(db_uri))
        all.add(stmt)

        if "http://dbpedia.org/ontology/thumbnail" in obj:
            stmt = (subject, URIRef("http://schema.org/image"), URIRef(obj["http://dbpedia.org/ontology/thumbnail"][0]["value"]))
            all.add(stmt)

        if "http://www.w3.org/2000/01/rdf-schema#label" in obj:
            labels = obj["http://www.w3.org/2000/01/rdf-schema#label"]
            for label in labels:
                if label["lang"] == "ja":
                    stmt = (subject, RDFS.label, Literal(label["value"]))
                    all.add(stmt)

        if "http://www.w3.org/2000/01/rdf-schema#comment" in obj:
            labels = obj["http://www.w3.org/2000/01/rdf-schema#comment"]
            for label in labels:
                stmt = (subject, URIRef("http://schema.org/description"), Literal(label["value"], lang=label["lang"]))
                all.add(stmt)

        if "http://www.w3.org/2002/07/owl#sameAs" in obj:
            labels = obj["http://www.w3.org/2002/07/owl#sameAs"]
            for label in labels:
                value = label["value"]
                if "http://dbpedia.org" in value or "http://ja.dbpedia.org" in value or "www.wikidata.org" in value:
                    stmt = (subject, URIRef("http://www.w3.org/2002/07/owl#sameAs"), URIRef(value))
                    all.add(stmt)

        # 位置情報
        '''
        if "point" in obj and prefix == "place":
            value = obj["point"]["value"].split(" ")

            # addGeo関数
            geoUri = addGeo({
                "lat" : float(value[0]),
                "long": float(value[1])
            })

            stmt = (subject, URIRef("http://schema.org/geo"), geoUri)

            if suffix not in places:
                places[suffix] = {
                    "lat" : float(value[0]),
                    "long": float(value[1])
                }

            all.add(stmt)
        '''

        # 正規化前
        if ln_org != "" and ln != ln_org:
            stmt = (subject, URIRef("http://schema.org/name"), Literal(ln_org))
            all.add(stmt)

path = "data/all.json"

all.serialize(destination=path, format='json-ld')
all.serialize(destination=path.replace(".json", ".rdf"), format='pretty-xml')
[((20, 6, 20, 13), 'rdflib.Graph', 'Graph', ({}, {}), '()', False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((23, 13, 23, 25), 'json.load', 'json.load', ({(23, 23, 23, 24): 'f'}, {}), '(f)', False, 'import json\n'), ((28, 13, 28, 25), 'json.load', 'json.load', ({(28, 23, 28, 24): 'f'}, {}), '(f)', False, 'import json\n'), ((105, 11, 105, 34), 'os.path.exists', 'os.path.exists', ({(105, 26, 105, 33): 'db_path'}, {}), '(db_path)', False, 'import os\n'), ((109, 11, 109, 36), 'os.path.exists', 'os.path.exists', ({(109, 26, 109, 35): 'wiki_path'}, {}), '(wiki_path)', False, 'import os\n'), ((120, 18, 120, 84), 'rdflib.URIRef', 'URIRef', ({(120, 25, 120, 83): "'https://shibusawa-dlab.github.io/lab1/api/' + prefix + '/' + ln"}, {}), "('https://shibusawa-dlab.github.io/lab1/api/' + prefix + '/' + ln)", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((147, 25, 147, 71), 'rdflib.URIRef', 'URIRef', ({(147, 32, 147, 70): '"""http://www.w3.org/2002/07/owl#sameAs"""'}, {}), "('http://www.w3.org/2002/07/owl#sameAs')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((147, 73, 147, 87), 'rdflib.URIRef', 'URIRef', ({(147, 80, 147, 86): 'db_uri'}, {}), '(db_uri)', False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((107, 21, 107, 33), 'json.load', 'json.load', ({(107, 31, 107, 32): 'f'}, {}), '(f)', False, 'import json\n'), ((111, 23, 111, 35), 'json.load', 'json.load', ({(111, 33, 111, 34): 'f'}, {}), '(f)', False, 'import json\n'), ((123, 39, 123, 87), 'rdflib.URIRef', 'URIRef', ({(123, 46, 123, 86): '"""https://jpsearch.go.jp/term/type/Agent"""'}, {}), "('https://jpsearch.go.jp/term/type/Agent')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((151, 29, 151, 62), 'rdflib.URIRef', 'URIRef', ({(151, 36, 151, 61): '"""http://schema.org/image"""'}, {}), "('http://schema.org/image')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((151, 64, 151, 128), 'rdflib.URIRef', 'URIRef', ({(151, 71, 151, 127): "obj['http://dbpedia.org/ontology/thumbnail'][0]['value']"}, {}), "(obj['http://dbpedia.org/ontology/thumbnail'][0]['value'])", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((198, 29, 198, 61), 'rdflib.URIRef', 'URIRef', ({(198, 36, 198, 60): '"""http://schema.org/name"""'}, {}), "('http://schema.org/name')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((198, 63, 198, 78), 'rdflib.Literal', 'Literal', ({(198, 71, 198, 77): 'ln_org'}, {}), '(ln_org)', False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((126, 39, 126, 86), 'rdflib.URIRef', 'URIRef', ({(126, 46, 126, 85): '"""https://jpsearch.go.jp/term/type/Time"""'}, {}), "('https://jpsearch.go.jp/term/type/Time')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((164, 33, 164, 72), 'rdflib.URIRef', 'URIRef', ({(164, 40, 164, 71): '"""http://schema.org/description"""'}, {}), "('http://schema.org/description')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((164, 74, 164, 117), 'rdflib.Literal', 'Literal', (), '', False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((129, 39, 129, 87), 'rdflib.URIRef', 'URIRef', ({(129, 46, 129, 86): '"""https://jpsearch.go.jp/term/type/Place"""'}, {}), "('https://jpsearch.go.jp/term/type/Place')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((158, 49, 158, 72), 'rdflib.Literal', 'Literal', ({(158, 57, 158, 71): "label['value']"}, {}), "(label['value'])", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((172, 37, 172, 83), 
'rdflib.URIRef', 'URIRef', ({(172, 44, 172, 82): '"""http://www.w3.org/2002/07/owl#sameAs"""'}, {}), "('http://www.w3.org/2002/07/owl#sameAs')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((172, 85, 172, 98), 'rdflib.URIRef', 'URIRef', ({(172, 92, 172, 97): 'value'}, {}), '(value)', False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((132, 39, 132, 87), 'rdflib.URIRef', 'URIRef', ({(132, 46, 132, 86): '"""https://jpsearch.go.jp/term/type/Event"""'}, {}), "('https://jpsearch.go.jp/term/type/Event')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((135, 39, 135, 94), 'rdflib.URIRef', 'URIRef', ({(135, 46, 135, 93): '"""https://jpsearch.go.jp/term/type/Organization"""'}, {}), "('https://jpsearch.go.jp/term/type/Organization')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((138, 39, 138, 89), 'rdflib.URIRef', 'URIRef', ({(138, 46, 138, 88): '"""https://jpsearch.go.jp/term/type/Keyword"""'}, {}), "('https://jpsearch.go.jp/term/type/Keyword')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n'), ((141, 39, 141, 86), 'rdflib.URIRef', 'URIRef', ({(141, 46, 141, 85): '"""https://jpsearch.go.jp/term/type/Type"""'}, {}), "('https://jpsearch.go.jp/term/type/Type')", False, 'from rdflib import URIRef, BNode, Literal, Graph\n')]
nuft/can-bootloader
client/tests/test_config_read_tool.py
18dd77dae1fb2328dac1fd1df2c9e5d5c936771e
import unittest
try:
    from unittest.mock import *
except ImportError:
    from mock import *

from msgpack import *

import bootloader_read_config
from commands import *
import sys
import json


class ReadConfigToolTestCase(unittest.TestCase):
    @patch('utils.write_command_retry')
    @patch('utils.write_command')
    @patch('utils.open_connection')
    @patch('builtins.print')
    def test_integration(self, print_mock,
                         open_conn,
                         write_command,
                         write_command_retry):
        sys.argv = "test.py -p /dev/ttyUSB0 0 1 2".split()

        configs = [{'id': i} for i in range(3)]

        write_command_retry.return_value = {
            i: packb(configs[i]) for i in range(3)
        }

        open_conn.return_value = object()

        bootloader_read_config.main()

        write_command_retry.assert_any_call(open_conn.return_value,
                                            encode_read_config(), [0, 1, 2])

        all_configs = {i: configs[i] for i in range(3)}

        print_mock.assert_any_call(json.dumps(all_configs, indent=4,
                                              sort_keys=True))

    @patch('utils.open_connection')
    @patch('utils.write_command_retry')
    @patch('utils.write_command')
    @patch('utils.read_can_datagrams')
    @patch('builtins.print')
    def test_network_discovery(self, print_mock,
                               read_can_datagram,
                               write_command,
                               write_command_retry,
                               open_conn):
        """
        Checks if we can perform a whole network discovery.
        """
        sys.argv = "test.py -p /dev/ttyUSB0 --all".split()

        # The first two board answers the ping
        board_answers = [(b'', [0], i) for i in range(1, 3)] + [None]

        read_can_datagram.return_value = iter(board_answers)

        write_command_retry.return_value = {
            i: packb({'id': i}) for i in range(1, 3)
        }

        bootloader_read_config.main()

        write_command.assert_any_call(open_conn.return_value,
                                      encode_ping(), list(range(1, 128)))
[((32, 8, 32, 37), 'bootloader_read_config.main', 'bootloader_read_config.main', ({}, {}), '()', False, 'import bootloader_read_config\n'), ((63, 8, 63, 37), 'bootloader_read_config.main', 'bootloader_read_config.main', ({}, {}), '()', False, 'import bootloader_read_config\n'), ((39, 35, 40, 61), 'json.dumps', 'json.dumps', (), '', False, 'import json\n')]
Pyrrolidine/letterboxd-bot
bot.py
b2cd1364e00c3ec6fb70be9c8be7a8b707a8ffbe
import logging
from asyncio import sleep

import discord
from discord.ext import commands

from config import SETTINGS
from crew import crew_embed
from diary import diary_embed
from film import film_embed
from helpers import LetterboxdError
from list_ import list_embed
from review import review_embed
from user import user_embed

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s | %(message)s',
    datefmt='%m/%d %H:%M:%S')

bot = commands.Bot(command_prefix='!', case_insensitive=True)
bot.remove_command('help')


@bot.event
async def on_ready():
    logging.info(
        'Logged in %d servers as %s' % (len(bot.guilds), bot.user.name))
    bot.loop.create_task(update_stats())


@bot.event
async def on_message(message):
    if message.content.startswith('!'):
        message.content = message.content.replace('’', '').replace('‘', '')
        await bot.process_commands(message)


async def update_stats():
    while True:
        await bot.change_presence(
            activity=discord.Game('!helplb - {} servers'.format(
                len(bot.guilds))))
        await sleep(900)


@bot.event
async def on_command_error(ctx, error):
    if isinstance(error, commands.MissingRequiredArgument):
        await ctx.send('This command requires a parameter.')
    elif isinstance(error, commands.BotMissingPermissions):
        await ctx.send('This command requires the {} permission.'.format(
            ', '.join(err for err in error.missing_perms)))
    elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)):
        return
    elif isinstance(error, commands.CommandInvokeError):
        if isinstance(error.original, discord.HTTPException):
            return
    else:
        await ctx.send('Sorry, the command crashed. :/')
        logging.error(ctx.message.content)
        raise error


async def send_msg(ctx, msg):
    if isinstance(msg, discord.Embed):
        await ctx.send(embed=msg)
    else:
        await ctx.send(msg)


# Commands
@bot.command()
async def helplb(ctx):
    help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62))
    help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png')
    help_embed.set_author(
        name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg')
    help_embed.set_footer(
        text='Created by Porkepik#2664',
        icon_url='https://i.imgur.com/li4cLpd.png')
    for key, value in SETTINGS['help'].items():
        help_embed.add_field(name=key, value=value, inline=False)
    help_embed.description = 'Invite Bot | '\
        + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)'
    await ctx.send(embed=help_embed)


@bot.command()
async def user(ctx, username):
    try:
        msg = await user_embed(username)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command()
async def diary(ctx, username):
    try:
        msg = await diary_embed(username)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['actor', 'actress', 'director'])
async def crew(ctx, *, arg):
    try:
        msg = await crew_embed(arg, ctx.invoked_with)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['movie'])
async def film(ctx, *, arg):
    try:
        # eiga.me ratings for specific servers
        if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']:
            msg = await film_embed(arg, True)
        else:
            msg = await film_embed(arg)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


async def check_if_two_args(ctx):
    msg = ctx.message.content.split()
    if len(msg) < 3:
        await ctx.send('This command requires 2 parameters.')
    return len(msg) > 2


@bot.command(name='list')
@commands.check(check_if_two_args)
async def list_(ctx, username, *args):
    try:
        msg = await list_embed(username, ' '.join(str(i) for i in args))
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['entry'])
@commands.check(check_if_two_args)
async def review(ctx, username, *args):
    try:
        msg = await review_embed(username, ' '.join(str(i) for i in args))
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(name='del')
@commands.bot_has_permissions(manage_messages=True)
async def delete(ctx):
    await ctx.message.delete()
    found_bot_msg = False
    found_usr_cmd = False
    cmd_list = list()
    for command in bot.commands:
        cmd_list.append('!' + command.name)
        for alias in command.aliases:
            cmd_list.append('!' + alias)

    async for log_message in ctx.channel.history(limit=30):
        if log_message.author.id == bot.user.id and not found_bot_msg:
            bot_message = log_message
            found_bot_msg = True
        elif found_bot_msg:
            if log_message.content:
                first_word = log_message.content.split()[0]
            else:
                continue
            if first_word in cmd_list:
                found_usr_cmd = True
                cmd_message = log_message
                break

    if found_usr_cmd:
        if not ctx.author.permissions_in(ctx.channel).manage_messages:
            if not cmd_message.author.id == ctx.author.id:
                return
        await cmd_message.delete()
        await bot_message.delete()


bot.run(SETTINGS['discord'])
[((15, 0, 18, 29), 'logging.basicConfig', 'logging.basicConfig', (), '', False, 'import logging\n'), ((20, 6, 20, 61), 'discord.ext.commands.Bot', 'commands.Bot', (), '', False, 'from discord.ext import commands\n'), ((138, 1, 138, 34), 'discord.ext.commands.check', 'commands.check', ({(138, 16, 138, 33): 'check_if_two_args'}, {}), '(check_if_two_args)', False, 'from discord.ext import commands\n'), ((148, 1, 148, 34), 'discord.ext.commands.check', 'commands.check', ({(148, 16, 148, 33): 'check_if_two_args'}, {}), '(check_if_two_args)', False, 'from discord.ext import commands\n'), ((158, 1, 158, 51), 'discord.ext.commands.bot_has_permissions', 'commands.bot_has_permissions', (), '', False, 'from discord.ext import commands\n'), ((43, 14, 43, 24), 'asyncio.sleep', 'sleep', ({(43, 20, 43, 23): '(900)'}, {}), '(900)', False, 'from asyncio import sleep\n'), ((76, 38, 76, 72), 'discord.Color.from_rgb', 'discord.Color.from_rgb', ({(76, 61, 76, 63): '54', (76, 65, 76, 67): '57', (76, 69, 76, 71): '62'}, {}), '(54, 57, 62)', False, 'import discord\n'), ((93, 20, 93, 40), 'user.user_embed', 'user_embed', ({(93, 31, 93, 39): 'username'}, {}), '(username)', False, 'from user import user_embed\n'), ((102, 20, 102, 41), 'diary.diary_embed', 'diary_embed', ({(102, 32, 102, 40): 'username'}, {}), '(username)', False, 'from diary import diary_embed\n'), ((111, 20, 111, 53), 'crew.crew_embed', 'crew_embed', ({(111, 31, 111, 34): 'arg', (111, 36, 111, 52): 'ctx.invoked_with'}, {}), '(arg, ctx.invoked_with)', False, 'from crew import crew_embed\n'), ((122, 24, 122, 45), 'film.film_embed', 'film_embed', ({(122, 35, 122, 38): 'arg', (122, 40, 122, 44): '(True)'}, {}), '(arg, True)', False, 'from film import film_embed\n'), ((124, 24, 124, 39), 'film.film_embed', 'film_embed', ({(124, 35, 124, 38): 'arg'}, {}), '(arg)', False, 'from film import film_embed\n'), ((60, 8, 60, 42), 'logging.error', 'logging.error', ({(60, 22, 60, 41): 'ctx.message.content'}, {}), '(ctx.message.content)', False, 'import logging\n')]
wgifford/ray
python/ray/experimental/workflow/execution.py
8acb469b047cd9b327c9477a13b030eb7357860e
import asyncio
import logging
import time
from typing import Set, List, Tuple, Optional, TYPE_CHECKING
import uuid

import ray

from ray.experimental.workflow import workflow_context
from ray.experimental.workflow import workflow_storage
from ray.experimental.workflow.common import (Workflow, WorkflowStatus,
                                              WorkflowMetaData, StepType)
from ray.experimental.workflow.step_executor import commit_step
from ray.experimental.workflow.storage import get_global_storage
from ray.experimental.workflow.workflow_access import (
    flatten_workflow_output, get_or_create_management_actor,
    get_management_actor)

if TYPE_CHECKING:
    from ray.experimental.workflow.step_executor import WorkflowExecutionResult

logger = logging.getLogger(__name__)


def run(entry_workflow: Workflow,
        workflow_id: Optional[str] = None,
        overwrite: bool = True) -> ray.ObjectRef:
    """Run a workflow asynchronously.

    # TODO(suquark): The current "run" always overwrite existing workflow.
    # We need to fix this later.
    """
    store = get_global_storage()
    assert ray.is_initialized()
    if workflow_id is None:
        # Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds}
        workflow_id = f"{str(uuid.uuid4())}.{time.time():.9f}"
    logger.info(f"Workflow job created. [id=\"{workflow_id}\", storage_url="
                f"\"{store.storage_url}\"].")

    with workflow_context.workflow_step_context(workflow_id,
                                                 store.storage_url):
        # checkpoint the workflow
        ws = workflow_storage.get_workflow_storage(workflow_id)
        commit_step(ws, "", entry_workflow)
        workflow_manager = get_or_create_management_actor()
        ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION)
        # NOTE: It is important to 'ray.get' the returned output. This
        # ensures caller of 'run()' holds the reference to the workflow
        # result. Otherwise if the actor removes the reference of the
        # workflow output, the caller may fail to resolve the result.
        result: "WorkflowExecutionResult" = ray.get(
            workflow_manager.run_or_resume.remote(workflow_id,
                                                  ignore_existing))
        if entry_workflow.data.step_type == StepType.FUNCTION:
            return flatten_workflow_output(workflow_id,
                                           result.persisted_output)
        else:
            return flatten_workflow_output(workflow_id,
                                           result.volatile_output)


# TODO(suquark): support recovery with ObjectRef inputs.
def resume(workflow_id: str) -> ray.ObjectRef:
    """Resume a workflow asynchronously. See "api.resume()" for details.
    """
    storage = get_global_storage()
    logger.info(f"Resuming workflow [id=\"{workflow_id}\", storage_url="
                f"\"{storage.storage_url}\"].")
    workflow_manager = get_or_create_management_actor()
    # NOTE: It is important to 'ray.get' the returned output. This
    # ensures caller of 'run()' holds the reference to the workflow
    # result. Otherwise if the actor removes the reference of the
    # workflow output, the caller may fail to resolve the result.
    result: "WorkflowExecutionResult" = ray.get(
        workflow_manager.run_or_resume.remote(
            workflow_id, ignore_existing=False))
    logger.info(f"Workflow job {workflow_id} resumed.")
    return flatten_workflow_output(workflow_id, result.persisted_output)


def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef:
    """Get the output of a running workflow.
    See "api.get_output()" for details.
    """
    assert ray.is_initialized()
    try:
        workflow_manager = get_management_actor()
    except ValueError as e:
        raise ValueError(
            "Failed to connect to the workflow management "
            "actor. The workflow could have already failed. You can use "
            "workflow.resume() to resume the workflow.") from e
    output = ray.get(workflow_manager.get_output.remote(workflow_id, name))
    return flatten_workflow_output(workflow_id, output)


def cancel(workflow_id: str) -> None:
    try:
        workflow_manager = get_management_actor()
        ray.get(workflow_manager.cancel_workflow.remote(workflow_id))
    except ValueError:
        wf_store = workflow_storage.get_workflow_storage(workflow_id)
        wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED))


def get_status(workflow_id: str) -> Optional[WorkflowStatus]:
    try:
        workflow_manager = get_management_actor()
        running = ray.get(
            workflow_manager.is_workflow_running.remote(workflow_id))
    except Exception:
        running = False
    if running:
        return WorkflowStatus.RUNNING
    store = workflow_storage.get_workflow_storage(workflow_id)
    meta = store.load_workflow_meta()
    if meta is None:
        raise ValueError(f"No such workflow_id {workflow_id}")
    return meta.status


def list_all(status_filter: Set[WorkflowStatus]
             ) -> List[Tuple[str, WorkflowStatus]]:
    try:
        workflow_manager = get_management_actor()
    except ValueError:
        workflow_manager = None

    if workflow_manager is None:
        runnings = []
    else:
        runnings = ray.get(workflow_manager.list_running_workflow.remote())
    if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1:
        return [(r, WorkflowStatus.RUNNING) for r in runnings]

    runnings = set(runnings)
    # Here we don't have workflow id, so use empty one instead
    store = workflow_storage.get_workflow_storage("")
    ret = []
    for (k, s) in store.list_workflow():
        if s == WorkflowStatus.RUNNING and k not in runnings:
            s = WorkflowStatus.RESUMABLE
        if s in status_filter:
            ret.append((k, s))
    return ret


def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]:
    filter_set = {WorkflowStatus.RESUMABLE}
    if with_failed:
        filter_set.add(WorkflowStatus.FAILED)
    all_failed = list_all(filter_set)

    try:
        workflow_manager = get_management_actor()
    except Exception as e:
        raise RuntimeError("Failed to get management actor") from e

    async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]:
        try:
            result: "WorkflowExecutionResult" = (
                await workflow_manager.run_or_resume.remote(wid))
            obj = flatten_workflow_output(wid, result.persisted_output)
            return wid, obj
        except Exception:
            logger.error(f"Failed to resume workflow {wid}")
            return (wid, None)

    ret = workflow_storage.asyncio_run(
        asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed]))
    return [(wid, obj) for (wid, obj) in ret if obj is not None]
[((22, 9, 22, 36), 'logging.getLogger', 'logging.getLogger', ({(22, 27, 22, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((33, 12, 33, 32), 'ray.experimental.workflow.storage.get_global_storage', 'get_global_storage', ({}, {}), '()', False, 'from ray.experimental.workflow.storage import get_global_storage\n'), ((34, 11, 34, 31), 'ray.is_initialized', 'ray.is_initialized', ({}, {}), '()', False, 'import ray\n'), ((66, 14, 66, 34), 'ray.experimental.workflow.storage.get_global_storage', 'get_global_storage', ({}, {}), '()', False, 'from ray.experimental.workflow.storage import get_global_storage\n'), ((69, 23, 69, 55), 'ray.experimental.workflow.workflow_access.get_or_create_management_actor', 'get_or_create_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((78, 11, 78, 72), 'ray.experimental.workflow.workflow_access.flatten_workflow_output', 'flatten_workflow_output', ({(78, 35, 78, 46): 'workflow_id', (78, 48, 78, 71): 'result.persisted_output'}, {}), '(workflow_id, result.persisted_output)', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((85, 11, 85, 31), 'ray.is_initialized', 'ray.is_initialized', ({}, {}), '()', False, 'import ray\n'), ((94, 11, 94, 55), 'ray.experimental.workflow.workflow_access.flatten_workflow_output', 'flatten_workflow_output', ({(94, 35, 94, 46): 'workflow_id', (94, 48, 94, 54): 'output'}, {}), '(workflow_id, output)', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((115, 12, 115, 62), 'ray.experimental.workflow.workflow_storage.get_workflow_storage', 'workflow_storage.get_workflow_storage', ({(115, 50, 115, 61): 'workflow_id'}, {}), '(workflow_id)', False, 'from ray.experimental.workflow import workflow_storage\n'), ((138, 12, 138, 53), 'ray.experimental.workflow.workflow_storage.get_workflow_storage', 'workflow_storage.get_workflow_storage', ({(138, 50, 138, 52): '""""""'}, {}), "('')", False, 'from ray.experimental.workflow import workflow_storage\n'), ((41, 9, 42, 66), 'ray.experimental.workflow.workflow_context.workflow_step_context', 'workflow_context.workflow_step_context', ({(41, 48, 41, 59): 'workflow_id', (42, 48, 42, 65): 'store.storage_url'}, {}), '(workflow_id, store.storage_url)', False, 'from ray.experimental.workflow import workflow_context\n'), ((44, 13, 44, 63), 'ray.experimental.workflow.workflow_storage.get_workflow_storage', 'workflow_storage.get_workflow_storage', ({(44, 51, 44, 62): 'workflow_id'}, {}), '(workflow_id)', False, 'from ray.experimental.workflow import workflow_storage\n'), ((45, 8, 45, 43), 'ray.experimental.workflow.step_executor.commit_step', 'commit_step', ({(45, 20, 45, 22): 'ws', (45, 24, 45, 26): '""""""', (45, 28, 45, 42): 'entry_workflow'}, {}), "(ws, '', entry_workflow)", False, 'from ray.experimental.workflow.step_executor import commit_step\n'), ((46, 27, 46, 59), 'ray.experimental.workflow.workflow_access.get_or_create_management_actor', 'get_or_create_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((87, 27, 87, 49), 'ray.experimental.workflow.workflow_access.get_management_actor', 'get_management_actor', ({}, {}), '()', False, 'from 
ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((99, 27, 99, 49), 'ray.experimental.workflow.workflow_access.get_management_actor', 'get_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((108, 27, 108, 49), 'ray.experimental.workflow.workflow_access.get_management_actor', 'get_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((125, 27, 125, 49), 'ray.experimental.workflow.workflow_access.get_management_actor', 'get_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((154, 27, 154, 49), 'ray.experimental.workflow.workflow_access.get_management_actor', 'get_management_actor', ({}, {}), '()', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((56, 19, 57, 67), 'ray.experimental.workflow.workflow_access.flatten_workflow_output', 'flatten_workflow_output', ({(56, 43, 56, 54): 'workflow_id', (57, 43, 57, 66): 'result.persisted_output'}, {}), '(workflow_id, result.persisted_output)', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((59, 19, 59, 79), 'ray.experimental.workflow.workflow_access.flatten_workflow_output', 'flatten_workflow_output', ({(59, 43, 59, 54): 'workflow_id', (59, 56, 59, 78): 'result.volatile_output'}, {}), '(workflow_id, result.volatile_output)', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((102, 19, 102, 69), 'ray.experimental.workflow.workflow_storage.get_workflow_storage', 'workflow_storage.get_workflow_storage', ({(102, 57, 102, 68): 'workflow_id'}, {}), '(workflow_id)', False, 'from ray.experimental.workflow import workflow_storage\n'), ((162, 18, 162, 71), 'ray.experimental.workflow.workflow_access.flatten_workflow_output', 'flatten_workflow_output', ({(162, 42, 162, 45): 'wid', (162, 47, 162, 70): 'result.persisted_output'}, {}), '(wid, result.persisted_output)', False, 'from ray.experimental.workflow.workflow_access import flatten_workflow_output, get_or_create_management_actor, get_management_actor\n'), ((37, 23, 37, 34), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((103, 36, 103, 77), 'ray.experimental.workflow.common.WorkflowMetaData', 'WorkflowMetaData', ({(103, 53, 103, 76): 'WorkflowStatus.CANCELED'}, {}), '(WorkflowStatus.CANCELED)', False, 'from ray.experimental.workflow.common import Workflow, WorkflowStatus, WorkflowMetaData, StepType\n'), ((37, 29, 37, 41), 'uuid.uuid4', 'uuid.uuid4', ({}, {}), '()', False, 'import uuid\n')]
bertonha/python-zeep
src/zeep/wsse/__init__.py
748f4e028db2ef498bc6dd1e60d3555b7688f08c
from .compose import Compose  # noqa
from .signature import BinarySignature, Signature, MemorySignature  # noqa
from .username import UsernameToken  # noqa
[]
peteboi/Python-Scripts
Complab assignment.py
d84e352c41cff3f459d88c83bc81f6dc2f25ed05
# -*- coding: utf-8 -*-

import numpy as np
import matplotlib.pyplot as plt

def orbit(u):
    x,y,v_x,v_y = u
    r=np.hypot(x,y)
    #r= 1.521e+06
    #M,G=1.989e+30,6.7e-11
    M,G=20,110
    f=G*M/r**3
    return np.array([v_x,v_y,-f*x,-f*y])

def RK4(f,u,dt):
    k1=f(u)*dt
    k2=f(u+0.5*k1)*dt
    k3=f(u+0.5*k2)*dt
    k4=f(u+k3)*dt
    return u+(k1+2*k2+2*k3+k4)/6

def RK4_int(f,y0,tspan):
    y=np.zeros([len(tspan),len(y0)])
    y[0,:] =y0
    for k in range (1,len(tspan)):
        y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1])
    return y

dt=0.1
t = np.arange(0,10,dt)
y0=np.array([10, 0.0, 10, 10])

sol_rk4=RK4_int(orbit,y0,t)
x,y,v_x,v_y = sol_rk4.T

plt.grid()
plt.plot(x,y)
plt.show()
[((31, 4, 31, 22), 'numpy.arange', 'np.arange', ({(31, 14, 31, 15): '0', (31, 16, 31, 18): '10', (31, 19, 31, 21): 'dt'}, {}), '(0, 10, dt)', True, 'import numpy as np\n'), ((32, 3, 32, 30), 'numpy.array', 'np.array', ({(32, 12, 32, 29): '[10, 0.0, 10, 10]'}, {}), '([10, 0.0, 10, 10])', True, 'import numpy as np\n'), ((36, 0, 36, 10), 'matplotlib.pyplot.grid', 'plt.grid', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((37, 0, 37, 13), 'matplotlib.pyplot.plot', 'plt.plot', ({(37, 9, 37, 10): 'x', (37, 11, 37, 12): 'y'}, {}), '(x, y)', True, 'import matplotlib.pyplot as plt\n'), ((38, 0, 38, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((7, 6, 7, 19), 'numpy.hypot', 'np.hypot', ({(7, 15, 7, 16): 'x', (7, 17, 7, 18): 'y'}, {}), '(x, y)', True, 'import numpy as np\n'), ((12, 11, 12, 40), 'numpy.array', 'np.array', ({(12, 20, 12, 39): '[v_x, v_y, -f * x, -f * y]'}, {}), '([v_x, v_y, -f * x, -f * y])', True, 'import numpy as np\n')]
gamabounty/django-factory-generator
factory_generator/management/commands/generate_factories.py
284184b22f3564a7a915ac3f3363e588d3721158
import os

from django.apps import apps
from django.core.management.base import BaseCommand

from factory_generator.generator import FactoryAppGenerator


class Command(BaseCommand):
    help = 'Create model factories for all installed apps'

    def handle(self, *args, **options):
        created_files = []
        for app in apps.get_app_configs():
            factory_app_generator = FactoryAppGenerator(app)
            created_files += factory_app_generator.create_files()
        self.stdout.write(self.style.SUCCESS('Successfully created factories:'))
        for created_file in created_files:
            self.stdout.write(self.style.SUCCESS('- ' + created_file))
[((14, 19, 14, 41), 'django.apps.apps.get_app_configs', 'apps.get_app_configs', ({}, {}), '()', False, 'from django.apps import apps\n'), ((15, 36, 15, 60), 'factory_generator.generator.FactoryAppGenerator', 'FactoryAppGenerator', ({(15, 56, 15, 59): 'app'}, {}), '(app)', False, 'from factory_generator.generator import FactoryAppGenerator\n')]
qiangli/cellranger
mro/stages/analyzer/run_differential_expression/__init__.py
046e24c3275cfbd4516a6ebc064594513a5c45b7
#!/usr/bin/env python
#
# Copyright (c) 2017 10X Genomics, Inc. All rights reserved.
#
import cellranger.analysis.diffexp as cr_diffexp
import cellranger.analysis.io as analysis_io
from cellranger.analysis.singlegenome import SingleGenomeAnalysis
import cellranger.h5_constants as h5_constants
import cellranger.analysis.constants as analysis_constants
import cellranger.matrix as cr_matrix
import cellranger.io as cr_io
import cellranger.library_constants as lib_constants

NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes > 1 thread. Martian thinks it does and kills it on long jobs

__MRO__ = """
stage RUN_DIFFERENTIAL_EXPRESSION(
    in  h5     matrix_h5,
    in  h5     clustering_h5,
    in  bool   skip,
    in  int    random_seed,
    in  int    max_clusters,
    out h5     diffexp_h5,
    out path   diffexp_csv,
    src py     "stages/analyzer/run_differential_expression",
) split using (
    in  string clustering_key,
)
"""

def split(args):
    if args.skip:
        return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]}

    chunks = []
    # FIXME: Add one for reasons unknown
    matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5)
    chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB))

    # HACK - give big jobs more threads in order to avoid overloading a node
    threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN)
    threads = 4

    for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5):
        chunks.append({
            'clustering_key': key,
            '__mem_gb': chunk_mem_gb,
            '__threads': threads,
        })

    return {'chunks': chunks, 'join': {'__mem_gb' : 1}}

def main(args, outs):
    if args.skip:
        return

    matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5)

    # For now, only compute for gene expression features
    matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE)

    clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key)

    diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters)

    with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f:
        cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp)

    cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv)

def join(args, outs, chunk_defs, chunk_outs):
    if args.skip:
        return

    chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs]
    chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs]

    analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP,
                                                              analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP])

    for csv_dir in chunk_csv_dirs:
        cr_io.copytree(csv_dir, outs.diffexp_csv, allow_existing=True)
[((46, 15, 46, 84), 'cellranger.analysis.singlegenome.SingleGenomeAnalysis.load_clustering_keys_from_h5', 'SingleGenomeAnalysis.load_clustering_keys_from_h5', ({(46, 65, 46, 83): 'args.clustering_h5'}, {}), '(args.clustering_h5)', False, 'from cellranger.analysis.singlegenome import SingleGenomeAnalysis\n'), ((59, 13, 59, 63), 'cellranger.matrix.CountMatrix.load_h5_file', 'cr_matrix.CountMatrix.load_h5_file', ({(59, 48, 59, 62): 'args.matrix_h5'}, {}), '(args.matrix_h5)', True, 'import cellranger.matrix as cr_matrix\n'), ((64, 17, 64, 102), 'cellranger.analysis.singlegenome.SingleGenomeAnalysis.load_clustering_from_h5', 'SingleGenomeAnalysis.load_clustering_from_h5', ({(64, 62, 64, 80): 'args.clustering_h5', (64, 82, 64, 101): 'args.clustering_key'}, {}), '(args.clustering_h5, args.\n clustering_key)', False, 'from cellranger.analysis.singlegenome import SingleGenomeAnalysis\n'), ((66, 14, 66, 81), 'cellranger.analysis.diffexp.run_differential_expression', 'cr_diffexp.run_differential_expression', ({(66, 53, 66, 59): 'matrix', (66, 61, 66, 80): 'clustering.clusters'}, {}), '(matrix, clustering.clusters)', True, 'import cellranger.analysis.diffexp as cr_diffexp\n'), ((71, 4, 71, 103), 'cellranger.analysis.diffexp.save_differential_expression_csv', 'cr_diffexp.save_differential_expression_csv', ({(71, 48, 71, 67): 'args.clustering_key', (71, 69, 71, 76): 'diffexp', (71, 78, 71, 84): 'matrix', (71, 86, 71, 102): 'outs.diffexp_csv'}, {}), '(args.clustering_key, diffexp,\n matrix, outs.diffexp_csv)', True, 'import cellranger.analysis.diffexp as cr_diffexp\n'), ((80, 4, 81, 125), 'cellranger.analysis.io.combine_h5_files', 'analysis_io.combine_h5_files', ({(80, 33, 80, 42): 'chunk_h5s', (80, 44, 80, 59): 'outs.diffexp_h5', (80, 61, 81, 124): '[analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP,\n analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]'}, {}), '(chunk_h5s, outs.diffexp_h5, [\n analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP,\n analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP])', True, 'import cellranger.analysis.io as analysis_io\n'), ((39, 26, 39, 89), 'cellranger.matrix.CountMatrix.get_mem_gb_from_matrix_h5', 'cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5', ({(39, 74, 39, 88): 'args.matrix_h5'}, {}), '(args.matrix_h5)', True, 'import cellranger.matrix as cr_matrix\n'), ((43, 18, 43, 68), 'cellranger.io.get_thread_request_from_mem_gb', 'cr_io.get_thread_request_from_mem_gb', ({(43, 55, 43, 67): 'chunk_mem_gb'}, {}), '(chunk_mem_gb)', True, 'import cellranger.io as cr_io\n'), ((68, 9, 68, 57), 'cellranger.analysis.io.open_h5_for_writing', 'analysis_io.open_h5_for_writing', ({(68, 41, 68, 56): 'outs.diffexp_h5'}, {}), '(outs.diffexp_h5)', True, 'import cellranger.analysis.io as analysis_io\n'), ((69, 8, 69, 83), 'cellranger.analysis.diffexp.save_differential_expression_h5', 'cr_diffexp.save_differential_expression_h5', ({(69, 51, 69, 52): 'f', (69, 54, 69, 73): 'args.clustering_key', (69, 75, 69, 82): 'diffexp'}, {}), '(f, args.clustering_key, diffexp)', True, 'import cellranger.analysis.diffexp as cr_diffexp\n'), ((84, 8, 84, 70), 'cellranger.io.copytree', 'cr_io.copytree', (), '', True, 'import cellranger.io as cr_io\n')]
ethankward/sympy
sympy/combinatorics/testutil.py
44664d9f625a1c68bc492006cfe1012cb0b49ee4
from sympy.combinatorics import Permutation
from sympy.combinatorics.util import _distribute_gens_by_base

rmul = Permutation.rmul


def _cmp_perm_lists(first, second):
    """
    Compare two lists of permutations as sets.

    This is used for testing purposes. Since the array form of a
    permutation is currently a list, Permutation is not hashable
    and cannot be put into a set.

    Examples
    ========

    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _cmp_perm_lists
    >>> a = Permutation([0, 2, 3, 4, 1])
    >>> b = Permutation([1, 2, 0, 4, 3])
    >>> c = Permutation([3, 4, 0, 1, 2])
    >>> ls1 = [a, b, c]
    >>> ls2 = [b, c, a]
    >>> _cmp_perm_lists(ls1, ls2)
    True

    """
    return {tuple(a) for a in first} == \
           {tuple(a) for a in second}


def _naive_list_centralizer(self, other, af=False):
    from sympy.combinatorics.perm_groups import PermutationGroup
    """
    Return a list of elements for the centralizer of a subgroup/set/element.

    This is a brute force implementation that goes over all elements of the
    group and checks for membership in the centralizer. It is used to
    test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import _naive_list_centralizer
    >>> from sympy.combinatorics.named_groups import DihedralGroup
    >>> D = DihedralGroup(4)
    >>> _naive_list_centralizer(D, D)
    [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])]

    See Also
    ========

    sympy.combinatorics.perm_groups.centralizer

    """
    from sympy.combinatorics.permutations import _af_commutes_with
    if hasattr(other, 'generators'):
        elements = list(self.generate_dimino(af=True))
        gens = [x._array_form for x in other.generators]
        commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in gens)
        centralizer_list = []
        if not af:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(Permutation._af_new(element))
        else:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(element)
        return centralizer_list
    elif hasattr(other, 'getitem'):
        return _naive_list_centralizer(self, PermutationGroup(other), af)
    elif hasattr(other, 'array_form'):
        return _naive_list_centralizer(self, PermutationGroup([other]), af)


def _verify_bsgs(group, base, gens):
    """
    Verify the correctness of a base and strong generating set.

    This is a naive implementation using the definition of a base and a
    strong generating set relative to it. There are other procedures for
    verifying a base and strong generating set, but this one will serve
    for more robust testing.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import AlternatingGroup
    >>> from sympy.combinatorics.testutil import _verify_bsgs
    >>> A = AlternatingGroup(4)
    >>> A.schreier_sims()
    >>> _verify_bsgs(A, A.base, A.strong_gens)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims

    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    strong_gens_distr = _distribute_gens_by_base(base, gens)
    current_stabilizer = group
    for i in range(len(base)):
        candidate = PermutationGroup(strong_gens_distr[i])
        if current_stabilizer.order() != candidate.order():
            return False
        current_stabilizer = current_stabilizer.stabilizer(base[i])
    if current_stabilizer.order() != 1:
        return False
    return True


def _verify_centralizer(group, arg, centr=None):
    """
    Verify the centralizer of a group/set/element inside another group.

    This is used for testing ``.centralizer()`` from
    ``sympy.combinatorics.perm_groups``

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.perm_groups import PermutationGroup
    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _verify_centralizer
    >>> S = SymmetricGroup(5)
    >>> A = AlternatingGroup(5)
    >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])])
    >>> _verify_centralizer(S, A, centr)
    True

    See Also
    ========

    _naive_list_centralizer,
    sympy.combinatorics.perm_groups.PermutationGroup.centralizer,
    _cmp_perm_lists

    """
    if centr is None:
        centr = group.centralizer(arg)
    centr_list = list(centr.generate_dimino(af=True))
    centr_list_naive = _naive_list_centralizer(group, arg, af=True)
    return _cmp_perm_lists(centr_list, centr_list_naive)


def _verify_normal_closure(group, arg, closure=None):
    from sympy.combinatorics.perm_groups import PermutationGroup
    """
    Verify the normal closure of a subgroup/subset/element in a group.

    This is used to test
    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.testutil import _verify_normal_closure
    >>> S = SymmetricGroup(3)
    >>> A = AlternatingGroup(3)
    >>> _verify_normal_closure(S, A, closure=A)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    """
    if closure is None:
        closure = group.normal_closure(arg)
    conjugates = set()
    if hasattr(arg, 'generators'):
        subgr_gens = arg.generators
    elif hasattr(arg, '__getitem__'):
        subgr_gens = arg
    elif hasattr(arg, 'array_form'):
        subgr_gens = [arg]
    for el in group.generate_dimino():
        for gen in subgr_gens:
            conjugates.add(gen ^ el)
    naive_closure = PermutationGroup(list(conjugates))
    return closure.is_subgroup(naive_closure)


def canonicalize_naive(g, dummies, sym, *v):
    """
    Canonicalize tensor formed by tensors of the different types

    g  permutation representing the tensor
    dummies  list of dummy indices
    msym symmetry of the metric

    v is a list of (base_i, gens_i, n_i, sym_i) for tensors of type `i`
    base_i, gens_i BSGS for tensors of this type
    n_i  number ot tensors of type `i`
    sym_i symmetry under exchange of two component tensors of type `i`
        None  no symmetry
        0     commuting
        1     anticommuting

    Return 0 if the tensor is zero, else return the array form of
    the permutation representing the canonical form of the tensor.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import canonicalize_naive
    >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs
    >>> from sympy.combinatorics import Permutation, PermutationGroup
    >>> g = Permutation([1, 3, 2, 0, 4, 5])
    >>> base2, gens2 = get_symmetric_group_sgs(2)
    >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    [0, 2, 1, 3, 4, 5]
    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.tensor_can import gens_products, dummy_sgs
    from sympy.combinatorics.permutations import Permutation, _af_rmul
    v1 = []
    for i in range(len(v)):
        base_i, gens_i, n_i, sym_i = v[i]
        v1.append((base_i, gens_i, [[]]*n_i, sym_i))
    size, sbase, sgens = gens_products(*v1)
    dgens = dummy_sgs(dummies, sym, size-2)
    if isinstance(sym, int):
        num_types = 1
        dummies = [dummies]
        sym = [sym]
    else:
        num_types = len(sym)
    dgens = []
    for i in range(num_types):
        dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2))
    S = PermutationGroup(sgens)
    D = PermutationGroup([Permutation(x) for x in dgens])
    dlist = list(D.generate(af=True))
    g = g.array_form
    st = set()
    for s in S.generate(af=True):
        h = _af_rmul(g, s)
        for d in dlist:
            q = tuple(_af_rmul(d, h))
            st.add(q)
    a = list(st)
    a.sort()
    prev = (0,)*size
    for h in a:
        if h[:-2] == prev[:-2]:
            if h[-1] != prev[-1]:
                return 0
        prev = h
    return list(a[0])


def graph_certificate(gr):
    """
    Return a certificate for the graph

    gr adjacency list

    The graph is assumed to be unoriented and without external lines.

    Associate to each vertex of the graph a symmetric tensor with
    number of indices equal to the degree of the vertex; indices
    are contracted when they correspond to the same line of the graph.
    The canonical form of the tensor gives a certificate for the graph.

    This is not an efficient algorithm to get the certificate of a graph.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import graph_certificate
    >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]}
    >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}
    >>> c1 = graph_certificate(gr1)
    >>> c2 = graph_certificate(gr2)
    >>> c1
    [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19, 20, 21]
    >>> c1 == c2
    True
    """
    from sympy.combinatorics.permutations import _af_invert
    from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize
    items = list(gr.items())
    items.sort(key=lambda x: len(x[1]), reverse=True)
    pvert = [x[0] for x in items]
    pvert = _af_invert(pvert)

    # the indices of the tensor are twice the number of lines of the graph
    num_indices = 0
    for v, neigh in items:
        num_indices += len(neigh)
    # associate to each vertex its indices; for each line
    # between two vertices assign the
    # even index to the vertex which comes first in items,
    # the odd index to the other vertex
    vertices = [[] for i in items]
    i = 0
    for v, neigh in items:
        for v2 in neigh:
            if pvert[v] < pvert[v2]:
                vertices[pvert[v]].append(i)
                vertices[pvert[v2]].append(i+1)
                i += 2
    g = []
    for v in vertices:
        g.extend(v)
    assert len(g) == num_indices
    g += [num_indices, num_indices + 1]
    size = num_indices + 2
    assert sorted(g) == list(range(size))
    g = Permutation(g)
    vlen = [0]*(len(vertices[0])+1)
    for neigh in vertices:
        vlen[len(neigh)] += 1
    v = []
    for i in range(len(vlen)):
        n = vlen[i]
        if n:
            base, gens = get_symmetric_group_sgs(i)
            v.append((base, gens, n, 0))
    v.reverse()
    dummies = list(range(num_indices))
    can = canonicalize(g, dummies, 0, *v)
    return can
[((104, 24, 104, 60), 'sympy.combinatorics.util._distribute_gens_by_base', '_distribute_gens_by_base', ({(104, 49, 104, 53): 'base', (104, 55, 104, 59): 'gens'}, {}), '(base, gens)', False, 'from sympy.combinatorics.util import _distribute_gens_by_base\n'), ((231, 25, 231, 43), 'sympy.combinatorics.tensor_can.gens_products', 'gens_products', ({(231, 39, 231, 42): '*v1'}, {}), '(*v1)', False, 'from sympy.combinatorics.tensor_can import gens_products, dummy_sgs\n'), ((232, 12, 232, 43), 'sympy.combinatorics.tensor_can.dummy_sgs', 'dummy_sgs', ({(232, 22, 232, 29): 'dummies', (232, 31, 232, 34): 'sym', (232, 36, 232, 42): 'size - 2'}, {}), '(dummies, sym, size - 2)', False, 'from sympy.combinatorics.tensor_can import gens_products, dummy_sgs\n'), ((242, 8, 242, 31), 'sympy.combinatorics.perm_groups.PermutationGroup', 'PermutationGroup', ({(242, 25, 242, 30): 'sgens'}, {}), '(sgens)', False, 'from sympy.combinatorics.perm_groups import PermutationGroup\n'), ((297, 12, 297, 29), 'sympy.combinatorics.permutations._af_invert', '_af_invert', ({(297, 23, 297, 28): 'pvert'}, {}), '(pvert)', False, 'from sympy.combinatorics.permutations import _af_invert\n'), ((322, 8, 322, 22), 'sympy.combinatorics.permutations.Permutation', 'Permutation', ({(322, 20, 322, 21): 'g'}, {}), '(g)', False, 'from sympy.combinatorics.permutations import Permutation, _af_rmul\n'), ((334, 10, 334, 41), 'sympy.combinatorics.tensor_can.canonicalize', 'canonicalize', ({(334, 23, 334, 24): 'g', (334, 26, 334, 33): 'dummies', (334, 35, 334, 36): '0', (334, 38, 334, 40): '*v'}, {}), '(g, dummies, 0, *v)', False, 'from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize\n'), ((107, 20, 107, 58), 'sympy.combinatorics.perm_groups.PermutationGroup', 'PermutationGroup', ({(107, 37, 107, 57): 'strong_gens_distr[i]'}, {}), '(strong_gens_distr[i])', False, 'from sympy.combinatorics.perm_groups import PermutationGroup\n'), ((248, 12, 248, 26), 'sympy.combinatorics.permutations._af_rmul', '_af_rmul', ({(248, 21, 248, 22): 'g', (248, 24, 248, 25): 's'}, {}), '(g, s)', False, 'from sympy.combinatorics.permutations import Permutation, _af_rmul\n'), ((241, 21, 241, 60), 'sympy.combinatorics.tensor_can.dummy_sgs', 'dummy_sgs', ({(241, 31, 241, 41): 'dummies[i]', (241, 43, 241, 49): 'sym[i]', (241, 51, 241, 59): '(size - 2)'}, {}), '(dummies[i], sym[i], size - 2)', False, 'from sympy.combinatorics.tensor_can import gens_products, dummy_sgs\n'), ((243, 26, 243, 40), 'sympy.combinatorics.permutations.Permutation', 'Permutation', ({(243, 38, 243, 39): 'x'}, {}), '(x)', False, 'from sympy.combinatorics.permutations import Permutation, _af_rmul\n'), ((330, 25, 330, 51), 'sympy.combinatorics.tensor_can.get_symmetric_group_sgs', 'get_symmetric_group_sgs', ({(330, 49, 330, 50): 'i'}, {}), '(i)', False, 'from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize\n'), ((73, 45, 73, 68), 'sympy.combinatorics.perm_groups.PermutationGroup', 'PermutationGroup', ({(73, 62, 73, 67): 'other'}, {}), '(other)', False, 'from sympy.combinatorics.perm_groups import PermutationGroup\n'), ((250, 22, 250, 36), 'sympy.combinatorics.permutations._af_rmul', '_af_rmul', ({(250, 31, 250, 32): 'd', (250, 34, 250, 35): 'h'}, {}), '(d, h)', False, 'from sympy.combinatorics.permutations import Permutation, _af_rmul\n'), ((61, 43, 61, 68), 'sympy.combinatorics.permutations._af_commutes_with', '_af_commutes_with', ({(61, 61, 61, 62): 'x', (61, 64, 61, 67): 'gen'}, {}), '(x, gen)', False, 'from sympy.combinatorics.permutations import 
_af_commutes_with\n'), ((75, 45, 75, 70), 'sympy.combinatorics.perm_groups.PermutationGroup', 'PermutationGroup', ({(75, 62, 75, 69): '[other]'}, {}), '([other])', False, 'from sympy.combinatorics.perm_groups import PermutationGroup\n'), ((66, 44, 66, 72), 'sympy.combinatorics.permutations.Permutation._af_new', 'Permutation._af_new', ({(66, 64, 66, 71): 'element'}, {}), '(element)', False, 'from sympy.combinatorics.permutations import Permutation, _af_rmul\n')]
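The helpers in the testutil module above are exercised most naturally from a small driving script. The sketch below is illustrative only, assuming the sympy.combinatorics packages referenced in the docstrings are importable; the choice of SymmetricGroup(4) is arbitrary.

from sympy.combinatorics.named_groups import SymmetricGroup
from sympy.combinatorics.testutil import _verify_bsgs, _verify_centralizer

S = SymmetricGroup(4)
S.schreier_sims()                               # compute a base and strong generating set
assert _verify_bsgs(S, S.base, S.strong_gens)   # naive re-check of the BSGS
assert _verify_centralizer(S, S)                # centralizer of S4 in itself is just the identity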
jonkeane/vatic-checker
src/vatic_checker/config.py
fa8aec6946dcfd3f466b62f9c00d81bc43514b22
localhost = "http://localhost/" # your local host database = "mysql://root@localhost/vaticChecker" # server://user:pass@localhost/dbname min_training = 2 # the minimum number of training videos to be considered recaptcha_secret = "" # recaptcha secret for verification duplicate_annotations = False # Should the server allow for duplicate annotations? import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server import os os.environ['PYTHON_EGG_CACHE'] = '/tmp/apache'
[((9, 32, 9, 57), 'os.path.abspath', 'os.path.abspath', ({(9, 48, 9, 56): '__file__'}, {}), '(__file__)', False, 'import os\n')]
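A hypothetical consumer of the settings module above; the import path and function name are illustrative and do not appear in the repository.

from vatic_checker import config

def has_enough_training(completed_videos):
    # min_training is documented above as the minimum number of training
    # videos a user must complete before being considered.
    return completed_videos >= config.min_training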
graingert/django
django/utils/timezone.py
784d0c261c76535dc760bc8d76793d92f35c1513
"""Timezone helper functions. This module uses pytz when it's available and fallbacks when it isn't. """ from datetime import datetime, timedelta, tzinfo from threading import local import time as _time try: import pytz except ImportError: pytz = None from django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and local time zones ZERO = timedelta(0) class UTC(tzinfo): """ UTC implementation taken from Python's docs. Used only when pytz isn't available. """ def __repr__(self): return "<UTC>" def utcoffset(self, dt): return ZERO def tzname(self, dt): return "UTC" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): """ Local time implementation taken from Python's docs. Used only when pytz isn't available, and most likely inaccurate. If you're having trouble with this class, don't waste your time, just install pytz. """ def __init__(self): # This code is moved in __init__ to execute it as late as possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return "<LocalTimezone>" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if pytz else UTC() """UTC time zone as a tzinfo instance.""" # In order to avoid accessing the settings at compile time, # wrap the expression in a function and cache the result. _localtime = None def get_default_timezone(): """ Returns the default time zone as a tzinfo instance. This is the time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. """ global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This function exists for consistency with get_current_timezone_name def get_default_timezone_name(): """ Returns the name of the default time zone. """ return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): """ Returns the currently active time zone as a tzinfo instance. """ return getattr(_active, "value", get_default_timezone()) def get_current_timezone_name(): """ Returns the name of the currently active time zone. """ return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): """ Returns the name of ``timezone``. """ try: # for pytz timezones return timezone.zone except AttributeError: # for regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These functions don't change os.environ['TZ'] and call time.tzset() # because it isn't thread safe. def activate(timezone): """ Sets the time zone for the current thread. The ``timezone`` argument must be an instance of a tzinfo subclass or a time zone name. If it is a time zone name, pytz is required. 
""" if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value = pytz.timezone(timezone) else: raise ValueError("Invalid timezone: %r" % timezone) def deactivate(): """ Unsets the time zone for the current thread. Django will then use the time zone defined by settings.TIME_ZONE. """ if hasattr(_active, "value"): del _active.value class override(object): """ Temporarily set the time zone for the current thread. This is a context manager that uses ``~django.utils.timezone.activate()`` to set the timezone on entry, and restores the previously active timezone on exit. The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a time zone name, or ``None``. If is it a time zone name, pytz is required. If it is ``None``, Django enables the default time zone. """ def __init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not None: _active.value = self.old_timezone else: del _active.value # Templates def template_localtime(value, use_tz=None): """ Checks if value is a datetime and converts it to local time if necessary. If use_tz is provided and is not None, that will force the value to be converted (or not), overriding the value of settings.USE_TZ. This function is designed for use by the template engine. """ should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value # Utilities def localtime(value, timezone=None): """ Converts an aware datetime.datetime to local time. Local time is defined by the current time zone, unless another time zone is specified. """ if timezone is None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time zones value = timezone.normalize(value) return value def now(): """ Returns an aware or naive datetime.datetime, depending on settings.USE_TZ. """ if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four functions don't perform any checks on their arguments. # The caller should ensure that they don't receive an invalid value like None. def is_aware(value): """ Determines if a given datetime.datetime is aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo """ return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None def is_naive(value): """ Determines if a given datetime.datetime is naive. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo """ return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): """ Makes a naive datetime.datetime in a given time zone aware. """ if hasattr(timezone, 'localize'): # available for pytz time zones return timezone.localize(value, is_dst=None) else: # may be wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): """ Makes an aware datetime.datetime naive in a given time zone. 
""" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time zones value = timezone.normalize(value) return value.replace(tzinfo=None)
[((26, 7, 26, 19), 'datetime.timedelta', 'timedelta', ({(26, 17, 26, 18): '0'}, {}), '(0)', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((123, 10, 123, 17), 'threading.local', 'local', ({}, {}), '()', False, 'from threading import local\n'), ((58, 25, 58, 59), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((64, 8, 64, 29), 'datetime.tzinfo.__init__', 'tzinfo.__init__', ({(64, 24, 64, 28): 'self'}, {}), '(self)', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((88, 16, 88, 32), 'time.mktime', '_time.mktime', ({(88, 29, 88, 31): 'tt'}, {}), '(tt)', True, 'import time as _time\n'), ((89, 13, 89, 35), 'time.localtime', '_time.localtime', ({(89, 29, 89, 34): 'stamp'}, {}), '(stamp)', True, 'import time as _time\n'), ((249, 15, 249, 29), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((60, 29, 60, 62), 'datetime.timedelta', 'timedelta', (), '', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((111, 25, 111, 58), 'pytz.timezone', 'pytz.timezone', ({(111, 39, 111, 57): 'settings.TIME_ZONE'}, {}), '(settings.TIME_ZONE)', False, 'import pytz\n'), ((146, 20, 146, 42), 'datetime.datetime.now', 'datetime.now', ({(146, 33, 146, 41): 'timezone'}, {}), '(timezone)', False, 'from datetime import datetime, timedelta, tzinfo\n'), ((164, 24, 164, 47), 'pytz.timezone', 'pytz.timezone', ({(164, 38, 164, 46): 'timezone'}, {}), '(timezone)', False, 'import pytz\n'), ((247, 15, 247, 32), 'datetime.datetime.utcnow', 'datetime.utcnow', ({}, {}), '()', False, 'from datetime import datetime, timedelta, tzinfo\n')]
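A short sketch of how the helpers above combine in practice; it assumes settings.USE_TZ is True and that pytz is installed so time zones can be named by string.

from django.utils import timezone

timezone.activate("America/Chicago")          # set the zone for the current thread
aware = timezone.now()                        # aware datetime, since USE_TZ is assumed True
local = timezone.localtime(aware)             # convert to the currently active zone
with timezone.override(None):                 # temporarily fall back to settings.TIME_ZONE
    default_name = timezone.get_current_timezone_name()
timezone.deactivate()
naive = timezone.make_naive(aware, timezone.get_default_timezone())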
Ouranosinc/malleefowl
malleefowl/tests/test_wps_caps.py
685a4cabe4c4ccafc2721a50e1f8178b8b81689e
import pytest

from pywps import Service
from pywps.tests import assert_response_success

from .common import client_for
from malleefowl.processes import processes


def test_wps_caps():
    client = client_for(Service(processes=processes))
    resp = client.get(service='wps', request='getcapabilities', version='1.0.0')
    names = resp.xpath_text('/wps:Capabilities'
                            '/wps:ProcessOfferings'
                            '/wps:Process'
                            '/ows:Identifier')
    assert sorted(names.split()) == [
        'download', 'esgsearch', 'thredds_download', 'workflow'
    ]
[((11, 24, 11, 52), 'pywps.Service', 'Service', (), '', False, 'from pywps import Service\n')]
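The same capabilities check can be run by hand against a deployed instance; the sketch below uses OWSLib, and the endpoint URL is an assumption rather than something the repository defines.

from owslib.wps import WebProcessingService

wps = WebProcessingService("http://localhost:8091/wps", version="1.0.0")
print(sorted(process.identifier for process in wps.processes))
# expected, per the assertion in the test above:
# ['download', 'esgsearch', 'thredds_download', 'workflow']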
CallumJHays/pyngrok
setup.py
e1a28948d1d8cf42f8eed1b166a2caf6b2a68066
from setuptools import setup

__author__ = "Alex Laird"
__copyright__ = "Copyright 2019, Alex Laird"
__version__ = "1.4.0"

with open("README.md", "r") as f:
    long_description = f.read()

setup(
    name="pyngrok",
    version=__version__,
    packages=["pyngrok"],
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
    install_requires=[
        "future",
        "pyyaml"
    ],
    entry_points="""
        [console_scripts]
        ngrok=pyngrok.ngrok:run
    """,
    description="A Python wrapper for Ngrok.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Alex Laird",
    author_email="[email protected]",
    url="https://github.com/alexdlaird/pyngrok",
    download_url="https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz".format(__version__),
    keywords=["ngrok", "tunnel", "tunneling", "webhook", "localhost"],
    license="MIT",
    classifiers=[
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Environment :: Console",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: MIT License",
        "Operating System :: MacOS",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Operating System :: Unix"
    ]
)
[]
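Once installed, the package exposes both the console script declared in entry_points and a Python API. The calls below are a sketch of typical pyngrok usage and should be checked against the version actually installed.

from pyngrok import ngrok

public_url = ngrok.connect(5000)   # open an HTTP tunnel to localhost:5000
print(public_url)
ngrok.kill()                       # stop the managed ngrok process when finished

# The entry point above wires the same module to a console script, e.g.:
#   $ ngrok http 80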
texasmichelle/kubeflow-cern
pipelines/trackml.py
886925fad5c37a72f6999c1100584fa8e4a0adae
#!/usr/bin/env python3 import kfp.dsl as dsl import kfp.gcp as gcp # Pipeline input variables. KUBECTL_IMAGE = "gcr.io/mcas-195423/trackml_master_kfp_kubectl" KUBECTL_IMAGE_VERSION = "1" TRACKML_IMAGE = "gcr.io/mcas-195423/trackml_master_trackml" TRACKML_IMAGE_VERSION = "1" def train_op(): return dsl.ContainerOp( name='train', image="{}:{}".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=["python"], arguments=["train.py"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image="{}:{}".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ "/src/set_kubectl.sh", "--namespace", "kubeflow", "--command", "apply -f /src/k8s/serve.yaml", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image="{}:{}".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=["python"], arguments=["resultsgen.py"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__ + '.tar.gz')
[((40, 1, 43, 1), 'kfp.dsl.pipeline', 'dsl.pipeline', (), '', True, 'import kfp.dsl as dsl\n'), ((18, 10, 18, 30), 'kfp.gcp.use_gcp_secret', 'gcp.use_gcp_secret', ({}, {}), '()', True, 'import kfp.gcp as gcp\n'), ((30, 10, 30, 30), 'kfp.gcp.use_gcp_secret', 'gcp.use_gcp_secret', ({}, {}), '()', True, 'import kfp.gcp as gcp\n'), ((38, 10, 38, 30), 'kfp.gcp.use_gcp_secret', 'gcp.use_gcp_secret', ({}, {}), '()', True, 'import kfp.gcp as gcp\n'), ((55, 2, 55, 21), 'kfp.compiler.Compiler', 'compiler.Compiler', ({}, {}), '()', True, 'import kfp.compiler as compiler\n')]
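Running the script produces trackml.py.tar.gz; a hedged sketch of pushing that package to a Kubeflow Pipelines endpoint follows. The host value is an assumption and the exact kfp.Client methods vary between kfp releases.

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")   # assumed endpoint
client.upload_pipeline("trackml.py.tar.gz", pipeline_name="trackml")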
aleasoluciones/infrabbitmq
bin/ticker.py
2759590156c63b9a04fb5daf8d588a084fc30629
# -*- coding: utf-8 -*- import time import puka import argparse import logging from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info("publish event {} {}".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical("Ticker Fails: {}".format(exc))
[((27, 16, 27, 69), 'infrabbitmq.factory.event_publisher_json_serializer', 'infrabbitmq_factory.event_publisher_json_serializer', ({}, {}), '()', True, 'from infrabbitmq import factory as infrabbitmq_factory\n'), ((33, 8, 33, 21), 'time.sleep', 'time.sleep', ({(33, 19, 33, 20): '(1)'}, {}), '(1)', False, 'import time\n'), ((36, 8, 38, 58), 'infcommon.utils.do_stuff_with_exponential_backoff', 'utils.do_stuff_with_exponential_backoff', ({(36, 48, 36, 67): 'rabbitmq_exceptions', (37, 12, 37, 25): 'publish_event', (38, 12, 38, 21): 'publisher', (38, 23, 38, 36): 'TICK_1_SECOND', (38, 38, 38, 45): 'network', (38, 47, 38, 51): 'secs', (38, 53, 38, 57): 'mins'}, {}), '(rabbitmq_exceptions, publish_event,\n publisher, TICK_1_SECOND, network, secs, mins)', False, 'from infcommon import utils\n'), ((66, 17, 66, 42), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((44, 12, 46, 66), 'infcommon.utils.do_stuff_with_exponential_backoff', 'utils.do_stuff_with_exponential_backoff', ({(44, 52, 44, 71): 'rabbitmq_exceptions', (45, 20, 45, 33): 'publish_event', (46, 20, 46, 29): 'publisher', (46, 31, 46, 44): 'TICK_1_MINUTE', (46, 46, 46, 53): 'network', (46, 55, 46, 59): 'secs', (46, 61, 46, 65): 'mins'}, {}), '(rabbitmq_exceptions, publish_event,\n publisher, TICK_1_MINUTE, network, secs, mins)', False, 'from infcommon import utils\n'), ((49, 16, 51, 67), 'infcommon.utils.do_stuff_with_exponential_backoff', 'utils.do_stuff_with_exponential_backoff', ({(49, 56, 49, 75): 'rabbitmq_exceptions', (50, 20, 50, 33): 'publish_event', (51, 20, 51, 29): 'publisher', (51, 31, 51, 45): 'TICK_2_MINUTES', (51, 47, 51, 54): 'network', (51, 56, 51, 60): 'secs', (51, 62, 51, 66): 'mins'}, {}), '(rabbitmq_exceptions, publish_event,\n publisher, TICK_2_MINUTES, network, secs, mins)', False, 'from infcommon import utils\n'), ((54, 16, 56, 67), 'infcommon.utils.do_stuff_with_exponential_backoff', 'utils.do_stuff_with_exponential_backoff', ({(54, 56, 54, 75): 'rabbitmq_exceptions', (55, 20, 55, 33): 'publish_event', (56, 20, 56, 29): 'publisher', (56, 31, 56, 45): 'TICK_5_MINUTES', (56, 47, 56, 54): 'network', (56, 56, 56, 60): 'secs', (56, 62, 56, 66): 'mins'}, {}), '(rabbitmq_exceptions, publish_event,\n publisher, TICK_5_MINUTES, network, secs, mins)', False, 'from infcommon import utils\n'), ((59, 16, 61, 68), 'infcommon.utils.do_stuff_with_exponential_backoff', 'utils.do_stuff_with_exponential_backoff', ({(59, 56, 59, 75): 'rabbitmq_exceptions', (60, 20, 60, 33): 'publish_event', (61, 20, 61, 29): 'publisher', (61, 31, 61, 46): 'TICK_60_MINUTES', (61, 48, 61, 55): 'network', (61, 57, 61, 61): 'secs', (61, 63, 61, 67): 'mins'}, {}), '(rabbitmq_exceptions, publish_event,\n publisher, TICK_60_MINUTES, network, secs, mins)', False, 'from infcommon import utils\n')]
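The retry helper utils.do_stuff_with_exponential_backoff comes from the separate infcommon package and is not shown in this file. A purely hypothetical stand-in, to make the calling convention above concrete (not the real implementation):

import logging
import time

def do_stuff_with_exponential_backoff(exceptions, func, *args, **kwargs):
    # Hypothetical: retry func on the given exceptions, doubling the wait
    # between attempts up to an arbitrary cap.
    delay, max_delay = 1, 300
    while True:
        try:
            return func(*args, **kwargs)
        except exceptions as exc:
            logging.warning("retrying in %ss after error: %s", delay, exc)
            time.sleep(delay)
            delay = min(delay * 2, max_delay)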
Tarpelite/UniNLP
transformers/modeling_encoder_decoder.py
176c2a0f88c8054bf69e1f92693d353737367c34
# coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Classes to support Encoder-Decoder architectures """ from __future__ import absolute_import, division, print_function, unicode_literals import logging import os import torch from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r""" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated as a transformer architecture with one of the base model classes of the library as encoder and (optionally) another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. """ def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r""" Instantiates an encoder and a decoder from one or two base classes of the library from pre-trained model checkpoints. The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) To train the model, you need to first set it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``. - a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``. - a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards. 
model_args: (`optional`) Sequence of positional arguments: All remaning positional arguments will be passed to the underlying model's ``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically loaded configuation. Configuration can be automatically loaded when: - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. - the model is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory. state_dict: (`optional`) dict: an optional state dictionnary for the model to use instead of a state dictionary loaded from saved weights file. This option can be used if you want to create a model from a pretrained configuration but load your own weights. In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string: Path to a directory in which a downloaded pre-trained model configuration should be cached if the standard cache should not be used. force_download: (`optional`) boolean, default False: Force to (re-)download the model weights and configuration files and override the cached versions if they exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionnary containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be used to update the configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically loaded: - If a configuration is provided with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__`` method (we assume all relevant updates to the configuration have already been done) - If a configuration is not provided, ``kwargs`` will be first passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute will be used to override said attribute with the supplied ``kwargs`` value. Remaining keys that do not correspond to any configuration attribute will be passed to the underlying model's ``__init__`` function. You can specify kwargs sepcific for the encoder and decoder by prefixing the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert """ # keyword arguments come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those # that apply to the model as a whole. # We let the specific kwargs override the common ones in case of conflict. 
kwargs_common = { argument: value for argument, value in kwargs.items() if not argument.startswith("encoder_") and not argument.startswith("decoder_") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len("encoder_") :]: value for argument, value in kwargs.items() if argument.startswith("encoder_") } ) kwargs_decoder.update( { argument[len("decoder_") :]: value for argument, value in kwargs.items() if argument.startswith("decoder_") } ) # Load and initialize the encoder and decoder # The distinction between encoder and decoder at the model level is made # by the value of the flag `is_decoder` that we need to set correctly. encoder = kwargs_encoder.pop("model", None) if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop("model", None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder) return model def save_pretrained(self, save_directory): """ Save a Seq2Seq model and its configuration file in a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in two separate directories. """ self.encoder.save_pretrained(os.path.join(save_directory, "encoder")) self.decoder.save_pretrained(os.path.join(save_directory, "decoder")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): """ The forward pass on a seq2eq depends what we are performing: - During training we perform one forward pass through both the encoder and decoder; - During prediction, we perform one forward pass through the encoder, and then perform several forward passes with the encoder's hidden state through the decoder to decode a full sequence. Therefore, we skip the forward pass on the encoder if an argument named `encoder_hidden_state` is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. """ # keyword arguments come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those # that apply to the model as whole. # We let the specific kwargs override the common ones in case of conflict. 
kwargs_common = { argument: value for argument, value in kwargs.items() if not argument.startswith("encoder_") and not argument.startswith("decoder_") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len("encoder_") :]: value for argument, value in kwargs.items() if argument.startswith("encoder_") } ) kwargs_decoder.update( { argument[len("decoder_") :]: value for argument, value in kwargs.items() if argument.startswith("decoder_") } ) # Encode if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop("hidden_states", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the last layer hidden state else: encoder_outputs = () # Decode kwargs_decoder["encoder_hidden_states"] = encoder_hidden_states kwargs_decoder["encoder_attention_mask"] = kwargs_encoder.get( "attention_mask", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r""" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of the encoder and decoder are of the same family. If the name of or that path to a pretrained model is specified the encoder and the decoder will be initialized with the pretrained weight (the cross-attention will be intialized randomly if its weights are not present). It is possible to override this behavior and initialize, say, the decoder randomly by creating it beforehand as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) """ def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): """ Tying the encoder and decoders' embeddings together. We need for each to get down to the embedding weights. However the different model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model, but it is "blocked" by a model-specific keyword (bert, )... 
""" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( "bert" not in pretrained_model_name_or_path or "roberta" in pretrained_model_name_or_path or "distilbert" in pretrained_model_name_or_path ): raise ValueError("Only the Bert model is currently supported.") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get("decoder_model", None) is None: # We will create a randomly initilized LSTM model as decoder if "decoder_config" not in kwargs: raise ValueError( "To load an LSTM in Encoder-Decoder model, please supply either: " " - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or" " - a dictionary of configuration parameters that will be used to initialize a" " torch.nn.LSTM model as `decoder_config` keyword argument. " " E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`" ) kwargs["decoder_model"] = torch.nn.LSTM(kwargs.pop("decoder_config")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs) return model
[((27, 9, 27, 36), 'logging.getLogger', 'logging.getLogger', ({(27, 27, 27, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((167, 37, 167, 76), 'os.path.join', 'os.path.join', ({(167, 50, 167, 64): 'save_directory', (167, 66, 167, 75): '"""encoder"""'}, {}), "(save_directory, 'encoder')", False, 'import os\n'), ((168, 37, 168, 76), 'os.path.join', 'os.path.join', ({(168, 50, 168, 64): 'save_directory', (168, 66, 168, 75): '"""decoder"""'}, {}), "(save_directory, 'decoder')", False, 'import os\n')]
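A minimal sketch of driving Model2Model, adapted from the class docstrings above. The tokenizer import and the decoder_lm_labels keyword reflect the transformers API of this vintage and are assumptions, not something this file defines.

import torch
from transformers import BertTokenizer
from transformers.modeling_encoder_decoder import Model2Model

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = Model2Model.from_pretrained("bert-base-uncased")   # Bert2Bert, as in the docstring

encoder_input_ids = torch.tensor([tokenizer.encode("This is the source text.")])
decoder_input_ids = torch.tensor([tokenizer.encode("This is the target text.")])

# Keyword arguments prefixed with decoder_ are routed to the decoder by forward();
# lm_labels is assumed to be accepted by the underlying BertForMaskedLM decoder.
outputs = model(encoder_input_ids, decoder_input_ids, decoder_lm_labels=decoder_input_ids)
loss = outputs[0]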